[llvm] d2856ff - [X86] Enable v32f16 FNEG custom lowering on AVX512 targets

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Thu Nov 30 02:07:31 PST 2023


Author: Simon Pilgrim
Date: 2023-11-30T10:07:01Z
New Revision: d2856ff457ca0563f42ce9a12d7c7537b7234f3a

URL: https://github.com/llvm/llvm-project/commit/d2856ff457ca0563f42ce9a12d7c7537b7234f3a
DIFF: https://github.com/llvm/llvm-project/commit/d2856ff457ca0563f42ce9a12d7c7537b7234f3a.diff

LOG: [X86] Enable v32f16 FNEG custom lowering on AVX512 targets

Added: 
    

Modified: 
    llvm/lib/Target/X86/X86ISelLowering.cpp
    llvm/test/CodeGen/X86/vec_fneg.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 7ce9684b0fa2d36..442178daf5611dd 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -2054,6 +2054,7 @@ X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
       for (auto VT : { MVT::v4i32, MVT::v8i32, MVT::v2i64, MVT::v4i64 })
         setOperationAction(ISD::CTPOP, VT, Legal);
     }
+    setOperationAction(ISD::FNEG, MVT::v32f16, Custom);
     setOperationAction(ISD::FABS, MVT::v32f16, Custom);
   }
 

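The new setOperationAction line routes ISD::FNEG on MVT::v32f16 through X86's custom lowering,
so a 512-bit half-precision fneg becomes a single XOR against a splat of -0.0 (the sign-bit
mask) instead of being scalarized element by element through vcvtph2ps/vcvtps2ph. A minimal IR
reproducer in the spirit of the modified test below (a sketch only; the exact test body is not
shown in this diff) would be:

    define <32 x half> @fneg_v32f16(ptr %p) nounwind {
      %v = load <32 x half>, ptr %p
      %neg = fneg <32 x half> %v
      ret <32 x half> %neg
    }

As the updated CHECK lines show, the AVX512VL and AVX512VLDQ runs now fold this into a
vpbroadcastw of -0.0, a vinserti64x4 to widen the mask to 512 bits, and one vpxorq against
the loaded vector.
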
diff --git a/llvm/test/CodeGen/X86/vec_fneg.ll b/llvm/test/CodeGen/X86/vec_fneg.ll
index ba19bc2dd27955b..b14da0a2c271280 100644
--- a/llvm/test/CodeGen/X86/vec_fneg.ll
+++ b/llvm/test/CodeGen/X86/vec_fneg.ll
@@ -578,320 +578,10 @@ define <32 x half> @fneg_v32f16(ptr %p) nounwind {
 ;
 ; X86-AVX512VL-LABEL: fneg_v32f16:
 ; X86-AVX512VL:       # %bb.0:
-; X86-AVX512VL-NEXT:    subl $128, %esp
 ; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VL-NEXT:    movzwl 60(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm0
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpbroadcastd {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vmovdqa 48(%eax), %xmm3
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vmovdqa %xmm3, %xmm4
-; X86-AVX512VL-NEXT:    vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm2
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vmovd %xmm2, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X86-AVX512VL-NEXT:    movzwl 44(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm2
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vmovd %xmm2, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vmovdqa 32(%eax), %xmm3
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm5
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm5, %xmm5
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm5, %xmm5
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm5, %xmm5
-; X86-AVX512VL-NEXT:    vmovd %xmm5, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm5
-; X86-AVX512VL-NEXT:    movzwl 28(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VL-NEXT:    vmovdqu %xmm1, (%esp) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vmovdqa 16(%eax), %xmm2
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm7
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vmovd %xmm7, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX512VL-NEXT:    movzwl 12(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm7
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vmovd %xmm7, %ecx
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm7
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VL-NEXT:    vmovd %xmm7, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm1
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm1
-; X86-AVX512VL-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VL-NEXT:    movzwl 56(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm6
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vmovd %xmm6, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
-; X86-AVX512VL-NEXT:    movzwl 40(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vmovdqa %xmm3, %xmm4
-; X86-AVX512VL-NEXT:    vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm6
-; X86-AVX512VL-NEXT:    movzwl 24(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VL-NEXT:    movzwl 8(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vmovdqu (%esp), %xmm2 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm7, %ymm1, %ymm1
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm6, %zmm1, %zmm1
-; X86-AVX512VL-NEXT:    vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
-; X86-AVX512VL-NEXT:    # zmm1 = zmm1[0],mem[0],zmm1[1],mem[1],zmm1[4],mem[4],zmm1[5],mem[5],zmm1[8],mem[8],zmm1[9],mem[9],zmm1[12],mem[12],zmm1[13],mem[13]
-; X86-AVX512VL-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VL-NEXT:    movzwl 52(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm3 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm6
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X86-AVX512VL-NEXT:    vmovd %xmm6, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
-; X86-AVX512VL-NEXT:    movzwl 36(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm4, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm6
-; X86-AVX512VL-NEXT:    movzwl 20(%eax), %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Folded Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    movzwl %dx, %edx
-; X86-AVX512VL-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VL-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm7, %ymm1, %ymm1
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm6, %zmm1, %zmm6
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm4
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm4
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X86-AVX512VL-NEXT:    vmovd %xmm4, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm3
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm3
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm3, %xmm3
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm3, %xmm3
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm3, %xmm3
-; X86-AVX512VL-NEXT:    vmovd %xmm3, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm4, %ymm1, %ymm3
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm2
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vmovd %xmm2, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X86-AVX512VL-NEXT:    vmovdqu (%esp), %xmm4 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX512VL-NEXT:    movzwl %ax, %eax
-; X86-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm4, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %ecx
-; X86-AVX512VL-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VL-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vcvtps2ph $4, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vmovd %xmm0, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
-; X86-AVX512VL-NEXT:    vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
-; X86-AVX512VL-NEXT:    # zmm0 = zmm0[0],mem[0],zmm0[2],mem[2],zmm0[4],mem[4],zmm0[6],mem[6]
-; X86-AVX512VL-NEXT:    addl $128, %esp
+; X86-AVX512VL-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X86-AVX512VL-NEXT:    vpxorq (%eax), %zmm0, %zmm0
 ; X86-AVX512VL-NEXT:    retl
 ;
 ; X86-AVX512FP16-LABEL: fneg_v32f16:
@@ -903,320 +593,10 @@ define <32 x half> @fneg_v32f16(ptr %p) nounwind {
 ;
 ; X86-AVX512VLDQ-LABEL: fneg_v32f16:
 ; X86-AVX512VLDQ:       # %bb.0:
-; X86-AVX512VLDQ-NEXT:    subl $128, %esp
 ; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VLDQ-NEXT:    movzwl 60(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm0
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpbroadcastd {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqa 48(%eax), %xmm3
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vmovdqa %xmm3, %xmm4
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm2, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 44(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm2, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vmovdqa 32(%eax), %xmm3
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm5
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm5, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm5, %xmm5
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm5, %xmm5
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm5, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1],xmm2[2],xmm5[2],xmm2[3],xmm5[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm5
-; X86-AVX512VLDQ-NEXT:    movzwl 28(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm1, (%esp) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vmovdqa 16(%eax), %xmm2
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm7, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 12(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm7, %ecx
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm7, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm1
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VLDQ-NEXT:    movzwl 56(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm6
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm6, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 40(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vmovdqa %xmm3, %xmm4
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm3, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm6
-; X86-AVX512VLDQ-NEXT:    movzwl 24(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 8(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovdqu (%esp), %xmm2 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm7, %ymm1, %ymm1
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm6, %zmm1, %zmm1
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{[-0-9]+}}(%e{{[sb]}}p), %zmm1, %zmm1 # 64-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    # zmm1 = zmm1[0],mem[0],zmm1[1],mem[1],zmm1[4],mem[4],zmm1[5],mem[5],zmm1[8],mem[8],zmm1[9],mem[9],zmm1[12],mem[12],zmm1[13],mem[13]
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VLDQ-NEXT:    movzwl 52(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm3 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm6
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm6, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 36(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm4, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm1, %ymm6
-; X86-AVX512VLDQ-NEXT:    movzwl 20(%eax), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    movzwl %dx, %edx
-; X86-AVX512VLDQ-NEXT:    vmovd %edx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1],xmm1[2],xmm5[2],xmm1[3],xmm5[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm7, %ymm1, %ymm1
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm6, %zmm1, %zmm6
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm4
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm4, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm3
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm3, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm3, %xmm3
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm3, %xmm3
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm3, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm4, %ymm1, %ymm3
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm2, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqu (%esp), %xmm4 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X86-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm4, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %ecx
-; X86-AVX512VLDQ-NEXT:    movzwl %cx, %ecx
-; X86-AVX512VLDQ-NEXT:    vmovd %ecx, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovd %xmm0, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
-; X86-AVX512VLDQ-NEXT:    vpunpcklqdq {{[-0-9]+}}(%e{{[sb]}}p), %zmm0, %zmm0 # 64-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    # zmm0 = zmm0[0],mem[0],zmm0[2],mem[2],zmm0[4],mem[4],zmm0[6],mem[6]
-; X86-AVX512VLDQ-NEXT:    addl $128, %esp
+; X86-AVX512VLDQ-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X86-AVX512VLDQ-NEXT:    vpxorq (%eax), %zmm0, %zmm0
 ; X86-AVX512VLDQ-NEXT:    retl
 ;
 ; X64-SSE-LABEL: fneg_v32f16:
@@ -1247,302 +627,9 @@ define <32 x half> @fneg_v32f16(ptr %p) nounwind {
 ;
 ; X64-AVX512VL-LABEL: fneg_v32f16:
 ; X64-AVX512VL:       # %bb.0:
-; X64-AVX512VL-NEXT:    movzwl 60(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm0
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm0, %xmm1
-; X64-AVX512VL-NEXT:    vpbroadcastd {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X64-AVX512VL-NEXT:    vmovd %xmm1, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VL-NEXT:    vmovdqa 16(%rdi), %xmm2
-; X64-AVX512VL-NEXT:    vmovdqa 32(%rdi), %xmm3
-; X64-AVX512VL-NEXT:    vmovdqa 48(%rdi), %xmm4
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX512VL-NEXT:    movzwl 44(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VL-NEXT:    movzwl 28(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VL-NEXT:    movzwl 12(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm5
-; X64-AVX512VL-NEXT:    movzwl 56(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VL-NEXT:    movzwl 40(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VL-NEXT:    movzwl 24(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VL-NEXT:    movzwl 8(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm9
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vmovd %xmm9, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm6, %zmm7, %zmm6
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm5 = zmm6[0],zmm5[0],zmm6[1],zmm5[1],zmm6[4],zmm5[4],zmm6[5],zmm5[5],zmm6[8],zmm5[8],zmm6[9],zmm5[9],zmm6[12],zmm5[12],zmm6[13],zmm5[13]
-; X64-AVX512VL-NEXT:    movzwl 52(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm4, %xmm7
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VL-NEXT:    movzwl 36(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm3, %xmm8
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VL-NEXT:    movzwl 20(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm8
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VL-NEXT:    movzwl 4(%rdi), %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VL-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm9
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm9
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm9, %xmm9
-; X64-AVX512VL-NEXT:    vmovd %xmm9, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm6, %zmm7, %zmm6
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VL-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm3
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm3
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm3, %xmm3
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm3, %xmm3
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm3, %xmm3
-; X64-AVX512VL-NEXT:    vmovd %xmm3, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm4, %ymm3, %ymm3
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm2
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vmovd %xmm2, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm2
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VL-NEXT:    movzwl %ax, %eax
-; X64-AVX512VL-NEXT:    vmovd %eax, %xmm1
-; X64-AVX512VL-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X64-AVX512VL-NEXT:    vpxor %xmm0, %xmm1, %xmm0
-; X64-AVX512VL-NEXT:    vcvtps2ph $4, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
-; X64-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm5[0],zmm0[2],zmm5[2],zmm0[4],zmm5[4],zmm0[6],zmm5[6]
+; X64-AVX512VL-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X64-AVX512VL-NEXT:    vpxorq (%rdi), %zmm0, %zmm0
 ; X64-AVX512VL-NEXT:    retq
 ;
 ; X64-AVX512FP16-LABEL: fneg_v32f16:
@@ -1553,302 +640,9 @@ define <32 x half> @fneg_v32f16(ptr %p) nounwind {
 ;
 ; X64-AVX512VLDQ-LABEL: fneg_v32f16:
 ; X64-AVX512VLDQ:       # %bb.0:
-; X64-AVX512VLDQ-NEXT:    movzwl 60(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm0
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm0, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpbroadcastd {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm1
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm1, %xmm1
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm1, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VLDQ-NEXT:    vmovdqa 16(%rdi), %xmm2
-; X64-AVX512VLDQ-NEXT:    vmovdqa 32(%rdi), %xmm3
-; X64-AVX512VLDQ-NEXT:    vmovdqa 48(%rdi), %xmm4
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 44(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VLDQ-NEXT:    movzwl 28(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 12(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm5
-; X64-AVX512VLDQ-NEXT:    movzwl 56(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 40(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VLDQ-NEXT:    movzwl 24(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 8(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm9
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm9, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm6, %zmm7, %zmm6
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm5 = zmm6[0],zmm5[0],zmm6[1],zmm5[1],zmm6[4],zmm5[4],zmm6[5],zmm5[5],zmm6[8],zmm5[8],zmm6[9],zmm5[9],zmm6[12],zmm5[12],zmm6[13],zmm5[13]
-; X64-AVX512VLDQ-NEXT:    movzwl 52(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm6, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm4, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 36(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm3, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX512VLDQ-NEXT:    movzwl 20(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    movzwl 4(%rdi), %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm8, %xmm8
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm8, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm9
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm9, %xmm9
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm9, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm6, %zmm7, %zmm6
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm7, %xmm7
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm7, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm3
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm3, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm3, %xmm3
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm3, %xmm3
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm3, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm4, %ymm3, %ymm3
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm2
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm2, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm4, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VLDQ-NEXT:    movzwl %ax, %eax
-; X64-AVX512VLDQ-NEXT:    vmovd %eax, %xmm1
-; X64-AVX512VLDQ-NEXT:    vcvtph2ps %xmm1, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpxor %xmm0, %xmm1, %xmm0
-; X64-AVX512VLDQ-NEXT:    vcvtps2ph $4, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    vmovd %xmm0, %eax
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm6[0],zmm0[1],zmm6[1],zmm0[4],zmm6[4],zmm0[5],zmm6[5],zmm0[8],zmm6[8],zmm0[9],zmm6[9],zmm0[12],zmm6[12],zmm0[13],zmm6[13]
-; X64-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm5[0],zmm0[2],zmm5[2],zmm0[4],zmm5[4],zmm0[6],zmm5[6]
+; X64-AVX512VLDQ-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X64-AVX512VLDQ-NEXT:    vpxorq (%rdi), %zmm0, %zmm0
 ; X64-AVX512VLDQ-NEXT:    retq
   %v = load <32 x half>, ptr %p, align 16
   %nnv = fsub <32 x half> <half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0, half -0.0>, %v
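For reference, a minimal sketch (not part of this patch) of the pattern the new lowering targets: negating a <32 x half> only flips each element's sign bit, so with the custom FNEG handling enabled the backend can emit a single 512-bit vpxorq against a broadcast of half -0.0 (0x8000 per lane), exactly as the updated CHECK lines above show, instead of scalarizing through ph<->ps conversions. The function name and llc invocation below are illustrative assumptions, not taken from the test file (which expresses the negation as an fsub from -0.0 rather than the fneg instruction).

; Assumed invocation (illustrative): llc -mtriple=x86_64-- -mattr=+avx512vl
define <32 x half> @fneg_v32f16_sketch(ptr %p) nounwind {
  %v = load <32 x half>, ptr %p, align 16
  ; fneg only changes the sign bit of each f16 element, so on AVX512
  ; targets without FP16 arithmetic it can be lowered to a vpxorq with
  ; a splat of -0.0 rather than element-by-element conversion and xor.
  %r = fneg <32 x half> %v
  ret <32 x half> %r
}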