[llvm] 1d20b00 - [X86] Enable v8f16/v16f16/v32f16 FCOPYSIGN custom lowering on SSE2/AVX/AVX512 targets

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Thu Nov 30 03:48:55 PST 2023


Author: Simon Pilgrim
Date: 2023-11-30T11:48:33Z
New Revision: 1d20b009a0e274c0db518b0bca0dd5daabcc8754

URL: https://github.com/llvm/llvm-project/commit/1d20b009a0e274c0db518b0bca0dd5daabcc8754
DIFF: https://github.com/llvm/llvm-project/commit/1d20b009a0e274c0db518b0bca0dd5daabcc8754.diff

LOG: [X86] Enable v8f16/v16f16/v32f16 FCOPYSIGN custom lowering on SSE2/AVX/AVX512 targets
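
FCOPYSIGN only replaces the sign bit, so for f16 vectors it can be lowered
with plain bitwise logic on the 16-bit lanes: AND the magnitude operand with
0x7FFF to clear each sign bit, AND the sign operand with 0x8000 to keep only
each sign bit, and OR the two together. This avoids the per-element
scalarization visible in the removed CHECK lines below. A minimal IR sketch
(the function name is illustrative, not part of this commit's tests) that now
takes the custom-lowered path:

  define <8 x half> @copysign_example(<8 x half> %mag, <8 x half> %sgn) {
    %r = call <8 x half> @llvm.copysign.v8f16(<8 x half> %mag, <8 x half> %sgn)
    ret <8 x half> %r
  }
  declare <8 x half> @llvm.copysign.v8f16(<8 x half>, <8 x half>)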

Added: 
    

Modified: 
    llvm/lib/Target/X86/X86ISelLowering.cpp
    llvm/test/CodeGen/X86/vec_fcopysign.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 442178daf5611dd..6167be7bdf84e9f 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -1137,6 +1137,7 @@ X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
     setOperationAction(ISD::FDIV, MVT::v8f16, Expand);
     setOperationAction(ISD::FNEG, MVT::v8f16, Custom);
     setOperationAction(ISD::FABS, MVT::v8f16, Custom);
+    setOperationAction(ISD::FCOPYSIGN, MVT::v8f16, Custom);
 
     // Custom lower v2i64 and v2f64 selects.
     setOperationAction(ISD::SELECT,             MVT::v2f64, Custom);
@@ -1598,6 +1599,7 @@ X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
     setF16Action(MVT::v16f16, Expand);
     setOperationAction(ISD::FNEG, MVT::v16f16, Custom);
     setOperationAction(ISD::FABS, MVT::v16f16, Custom);
+    setOperationAction(ISD::FCOPYSIGN, MVT::v16f16, Custom);
     setOperationAction(ISD::FADD, MVT::v16f16, Expand);
     setOperationAction(ISD::FSUB, MVT::v16f16, Expand);
     setOperationAction(ISD::FMUL, MVT::v16f16, Expand);
@@ -2056,6 +2058,7 @@ X86TargetLowering::X86TargetLowering(const X86TargetMachine &TM,
     }
     setOperationAction(ISD::FNEG, MVT::v32f16, Custom);
     setOperationAction(ISD::FABS, MVT::v32f16, Custom);
+    setOperationAction(ISD::FCOPYSIGN, MVT::v32f16, Custom);
   }
 
   // This block controls legalization of v32i1/v64i1 which are available with

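In the updated checks that follow, the {{\.?LCPI[0-9]+_[0-9]+}} constant-pool
operands are the broadcast 0x8000 and 0x7FFF lane masks; printed as f16
vectors they display as -0.0E+0 and NaN respectively. On AVX512 targets the
whole sequence folds into a single vpternlogq: immediate $202 (0xCA) is the
bitwise select (mask & mag) | (~mask & sgn), and 9223231297218904063 is
0x7FFF7FFF7FFF7FFF, the same per-lane magnitude mask as a quadword. A minimal
IR sketch of the wider form, mirroring the fcopysign_v16f16 test further down
(the function name is illustrative):

  define <16 x half> @copysign_v16_example(<16 x half> %mag, <16 x half> %sgn) {
    %r = call <16 x half> @llvm.copysign.v16f16(<16 x half> %mag, <16 x half> %sgn)
    ret <16 x half> %r
  }
  declare <16 x half> @llvm.copysign.v16f16(<16 x half>, <16 x half>)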
diff --git a/llvm/test/CodeGen/X86/vec_fcopysign.ll b/llvm/test/CodeGen/X86/vec_fcopysign.ll
index 631f6c2d0259bc1..457210732396bdd 100644
--- a/llvm/test/CodeGen/X86/vec_fcopysign.ll
+++ b/llvm/test/CodeGen/X86/vec_fcopysign.ll
@@ -125,779 +125,77 @@ define <8 x half> @fcopysign_v8f16(ptr %p0, ptr %p1) nounwind {
 ; X86-SSE:       # %bb.0:
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-SSE-NEXT:    movdqa (%ecx), %xmm1
-; X86-SSE-NEXT:    movdqa (%eax), %xmm2
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm2[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm3
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm3
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm3
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm2[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm3
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm1[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    movaps %xmm2, %xmm4
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm4 = xmm4[1,1],xmm2[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm1, %xmm4
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm4 = xmm4[1,1],xmm1[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    psrld $16, %xmm2
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
+; X86-SSE-NEXT:    movaps (%ecx), %xmm0
+; X86-SSE-NEXT:    movaps (%eax), %xmm1
+; X86-SSE-NEXT:    andps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1
+; X86-SSE-NEXT:    andps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0
+; X86-SSE-NEXT:    orps %xmm1, %xmm0
 ; X86-SSE-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: fcopysign_v8f16:
 ; X86-AVX1:       # %bb.0:
-; X86-AVX1-NEXT:    pushl %esi
-; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %ecx
 ; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX1-NEXT:    vmovdqa (%eax), %xmm0
-; X86-AVX1-NEXT:    vmovdqa (%ecx), %xmm1
-; X86-AVX1-NEXT:    vbroadcastss 12(%ecx), %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 12(%eax), %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
-; X86-AVX1-NEXT:    vbroadcastss 8(%ecx), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 8(%eax), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X86-AVX1-NEXT:    vbroadcastss 4(%ecx), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X86-AVX1-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 4(%eax), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X86-AVX1-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %ecx, %eax
-; X86-AVX1-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X86-AVX1-NEXT:    popl %esi
+; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-AVX1-NEXT:    vmovaps (%ecx), %xmm0
+; X86-AVX1-NEXT:    vmovaps (%eax), %xmm1
+; X86-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm1, %xmm1
+; X86-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %xmm0, %xmm0
+; X86-AVX1-NEXT:    vorps %xmm1, %xmm0, %xmm0
 ; X86-AVX1-NEXT:    retl
 ;
 ; X86-AVX2-LABEL: fcopysign_v8f16:
 ; X86-AVX2:       # %bb.0:
-; X86-AVX2-NEXT:    pushl %edi
-; X86-AVX2-NEXT:    pushl %esi
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX2-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX2-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX2-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %esi
-; X86-AVX2-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX2-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    movl 12(%eax), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 12(%ecx), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    movl 8(%eax), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 8(%ecx), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    andl 4(%eax), %edx
-; X86-AVX2-NEXT:    movzwl 4(%ecx), %eax
-; X86-AVX2-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %eax
-; X86-AVX2-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X86-AVX2-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X86-AVX2-NEXT:    popl %esi
-; X86-AVX2-NEXT:    popl %edi
+; X86-AVX2-NEXT:    vpbroadcastw {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X86-AVX2-NEXT:    vpand (%ecx), %xmm0, %xmm0
+; X86-AVX2-NEXT:    vpbroadcastw {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX2-NEXT:    vpand (%eax), %xmm1, %xmm1
+; X86-AVX2-NEXT:    vpor %xmm0, %xmm1, %xmm0
 ; X86-AVX2-NEXT:    retl
 ;
-; X86-AVX512VL-LABEL: fcopysign_v8f16:
-; X86-AVX512VL:       # %bb.0:
-; X86-AVX512VL-NEXT:    pushl %edi
-; X86-AVX512VL-NEXT:    pushl %esi
-; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VL-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX512VL-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX512VL-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %esi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    movl 12(%eax), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 12(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    movl 8(%eax), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 8(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    andl 4(%eax), %edx
-; X86-AVX512VL-NEXT:    movzwl 4(%ecx), %eax
-; X86-AVX512VL-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X86-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X86-AVX512VL-NEXT:    popl %esi
-; X86-AVX512VL-NEXT:    popl %edi
-; X86-AVX512VL-NEXT:    retl
-;
-; X86-AVX512FP16-LABEL: fcopysign_v8f16:
-; X86-AVX512FP16:       # %bb.0:
-; X86-AVX512FP16-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512FP16-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512FP16-NEXT:    vmovdqa (%ecx), %xmm1
-; X86-AVX512FP16-NEXT:    vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
-; X86-AVX512FP16-NEXT:    vpternlogq $202, (%eax), %xmm1, %xmm0
-; X86-AVX512FP16-NEXT:    retl
-;
-; X86-AVX512VLDQ-LABEL: fcopysign_v8f16:
-; X86-AVX512VLDQ:       # %bb.0:
-; X86-AVX512VLDQ-NEXT:    pushl %edi
-; X86-AVX512VLDQ-NEXT:    pushl %esi
-; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    movl 12(%eax), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 12(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    movl 8(%eax), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 8(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    andl 4(%eax), %edx
-; X86-AVX512VLDQ-NEXT:    movzwl 4(%ecx), %eax
-; X86-AVX512VLDQ-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X86-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X86-AVX512VLDQ-NEXT:    popl %esi
-; X86-AVX512VLDQ-NEXT:    popl %edi
-; X86-AVX512VLDQ-NEXT:    retl
+; X86-AVX512-LABEL: fcopysign_v8f16:
+; X86-AVX512:       # %bb.0:
+; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-AVX512-NEXT:    vmovdqa (%ecx), %xmm1
+; X86-AVX512-NEXT:    vpbroadcastw {{.*#+}} xmm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX512-NEXT:    vpternlogq $202, (%eax), %xmm1, %xmm0
+; X86-AVX512-NEXT:    retl
 ;
 ; X64-SSE-LABEL: fcopysign_v8f16:
 ; X64-SSE:       # %bb.0:
-; X64-SSE-NEXT:    movdqa (%rdi), %xmm1
-; X64-SSE-NEXT:    movdqa (%rsi), %xmm2
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm2[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm3
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm3
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm3
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm2[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm3
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm3 = xmm3[1],xmm1[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    movaps %xmm2, %xmm4
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm4 = xmm4[1,1],xmm2[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm1, %xmm4
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm4 = xmm4[1,1],xmm1[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    psrld $16, %xmm2
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
+; X64-SSE-NEXT:    movaps (%rdi), %xmm0
+; X64-SSE-NEXT:    movaps (%rsi), %xmm1
+; X64-SSE-NEXT:    andps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1
+; X64-SSE-NEXT:    andps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
+; X64-SSE-NEXT:    orps %xmm1, %xmm0
 ; X64-SSE-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: fcopysign_v8f16:
 ; X64-AVX1:       # %bb.0:
-; X64-AVX1-NEXT:    vmovdqa (%rdi), %xmm0
-; X64-AVX1-NEXT:    vmovdqa (%rsi), %xmm1
-; X64-AVX1-NEXT:    vbroadcastss 12(%rsi), %xmm2
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 12(%rdi), %xmm2
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm3
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
-; X64-AVX1-NEXT:    vbroadcastss 8(%rsi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 8(%rdi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm3
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X64-AVX1-NEXT:    vbroadcastss 4(%rsi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 4(%rdi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm3
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm4[0],xmm0[0],xmm4[1],xmm0[1],xmm4[2],xmm0[2],xmm4[3],xmm0[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
+; X64-AVX1-NEXT:    vmovaps (%rdi), %xmm0
+; X64-AVX1-NEXT:    vmovaps (%rsi), %xmm1
+; X64-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1, %xmm1
+; X64-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
+; X64-AVX1-NEXT:    vorps %xmm1, %xmm0, %xmm0
 ; X64-AVX1-NEXT:    retq
 ;
 ; X64-AVX2-LABEL: fcopysign_v8f16:
 ; X64-AVX2:       # %bb.0:
-; X64-AVX2-NEXT:    vmovdqa (%rdi), %xmm0
-; X64-AVX2-NEXT:    vmovdqa (%rsi), %xmm1
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX2-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX2-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX2-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X64-AVX2-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X64-AVX2-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX2-NEXT:    andl 4(%rsi), %eax
-; X64-AVX2-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X64-AVX2-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
+; X64-AVX2-NEXT:    vpbroadcastw {{.*#+}} xmm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X64-AVX2-NEXT:    vpand (%rsi), %xmm0, %xmm0
+; X64-AVX2-NEXT:    vpbroadcastw {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-AVX2-NEXT:    vpand (%rdi), %xmm1, %xmm1
+; X64-AVX2-NEXT:    vpor %xmm0, %xmm1, %xmm0
 ; X64-AVX2-NEXT:    retq
 ;
-; X64-AVX512VL-LABEL: fcopysign_v8f16:
-; X64-AVX512VL:       # %bb.0:
-; X64-AVX512VL-NEXT:    vmovdqa (%rdi), %xmm0
-; X64-AVX512VL-NEXT:    vmovdqa (%rsi), %xmm1
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VL-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX512VL-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VL-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X64-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X64-AVX512VL-NEXT:    retq
-;
-; X64-AVX512FP16-LABEL: fcopysign_v8f16:
-; X64-AVX512FP16:       # %bb.0:
-; X64-AVX512FP16-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512FP16-NEXT:    vpbroadcastq {{.*#+}} xmm0 = [9223231297218904063,9223231297218904063]
-; X64-AVX512FP16-NEXT:    vpternlogq $202, (%rsi), %xmm1, %xmm0
-; X64-AVX512FP16-NEXT:    retq
-;
-; X64-AVX512VLDQ-LABEL: fcopysign_v8f16:
-; X64-AVX512VLDQ:       # %bb.0:
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rdi), %xmm0
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rsi), %xmm1
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX512VLDQ-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VLDQ-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X64-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm2[0]
-; X64-AVX512VLDQ-NEXT:    retq
+; X64-AVX512-LABEL: fcopysign_v8f16:
+; X64-AVX512:       # %bb.0:
+; X64-AVX512-NEXT:    vmovdqa (%rdi), %xmm1
+; X64-AVX512-NEXT:    vpbroadcastq {{.*#+}} xmm0 = [9223231297218904063,9223231297218904063]
+; X64-AVX512-NEXT:    vpternlogq $202, (%rsi), %xmm1, %xmm0
+; X64-AVX512-NEXT:    retq
   %a0 = load <8 x half>, ptr %p0, align 16
   %a1 = load <8 x half>, ptr %p1, align 16
   %t = call <8 x half> @llvm.copysign.v8f16(<8 x half> %a0, <8 x half> %a1)
@@ -1068,1485 +366,87 @@ define <16 x half> @fcopysign_v16f16(ptr %p0, ptr %p1) nounwind {
 ; X86-SSE:       # %bb.0:
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-SSE-NEXT:    movdqa (%ecx), %xmm1
-; X86-SSE-NEXT:    movdqa 16(%ecx), %xmm2
-; X86-SSE-NEXT:    movdqa (%eax), %xmm4
-; X86-SSE-NEXT:    movdqa 16(%eax), %xmm3
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm4[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm5
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm5
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm5
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm4[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm5
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm1[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    movaps %xmm4, %xmm6
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm6 = xmm6[1,1],xmm4[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm1, %xmm6
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm6 = xmm6[1,1],xmm1[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    psrld $16, %xmm4
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm3[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm2[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm4
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm4
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm4
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm3[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm4
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm2[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X86-SSE-NEXT:    psrlq $48, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X86-SSE-NEXT:    psrlq $48, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    movaps %xmm3, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm3[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm2, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    psrld $16, %xmm3
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm2
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0]
+; X86-SSE-NEXT:    movaps {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-SSE-NEXT:    movaps %xmm1, %xmm2
+; X86-SSE-NEXT:    andnps (%ecx), %xmm2
+; X86-SSE-NEXT:    movaps (%eax), %xmm0
+; X86-SSE-NEXT:    andps %xmm1, %xmm0
+; X86-SSE-NEXT:    orps %xmm2, %xmm0
+; X86-SSE-NEXT:    movaps %xmm1, %xmm2
+; X86-SSE-NEXT:    andnps 16(%ecx), %xmm2
+; X86-SSE-NEXT:    andps 16(%eax), %xmm1
+; X86-SSE-NEXT:    orps %xmm2, %xmm1
 ; X86-SSE-NEXT:    retl
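
The new SSE lowering needs just three bitwise ops per 128-bit half: the
movaps constant rendered as [NaN,NaN,...] is the per-lane magnitude mask
0x7FFF (an all-ones exponent/mantissa pattern, which is why it prints as
NaN), andnps pulls the sign bits out of one operand, andps keeps the
magnitude of the other, and orps merges the two. A minimal scalar sketch of
the per-lane computation, assuming halves held as uint16_t (names
illustrative):

    #include <cstdint>

    // copysign for one IEEE binary16 lane, done bitwise as the vector code does.
    uint16_t copysign_f16_bits(uint16_t mag, uint16_t sgn) {
      return (uint16_t)((mag & 0x7FFF) | (sgn & 0x8000));
    }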
 ;
 ; X86-AVX1-LABEL: fcopysign_v16f16:
 ; X86-AVX1:       # %bb.0:
-; X86-AVX1-NEXT:    pushl %esi
 ; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX1-NEXT:    vbroadcastss 28(%ecx), %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 28(%eax), %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX1-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX1-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vbroadcastss 24(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 24(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
-; X86-AVX1-NEXT:    vbroadcastss 20(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 20(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm3, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
-; X86-AVX1-NEXT:    vbroadcastss 12(%ecx), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 12(%eax), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX1-NEXT:    vbroadcastss 8(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 8(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
-; X86-AVX1-NEXT:    vbroadcastss 4(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X86-AVX1-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 4(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X86-AVX1-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %ecx, %eax
-; X86-AVX1-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
-; X86-AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX1-NEXT:    popl %esi
+; X86-AVX1-NEXT:    vmovups (%ecx), %ymm0
+; X86-AVX1-NEXT:    vmovups (%eax), %ymm1
+; X86-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm1, %ymm1
+; X86-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}, %ymm0, %ymm0
+; X86-AVX1-NEXT:    vorps %ymm1, %ymm0, %ymm0
 ; X86-AVX1-NEXT:    retl
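
With AVX the same mask-and-merge covers all 16 lanes at once: two 256-bit
loads, two vandps against constant-pool masks (the {{\.?LCPI...}} patterns),
and a single vorps. A hand-written intrinsics analogue (a sketch, not the
compiler's literal output):

    #include <immintrin.h>

    // Keep the magnitude bits of `mag` and the sign bits of `sgn`, 16 halves at a time.
    __m256 copysign_v16f16(__m256 mag, __m256 sgn) {
      const __m256 mag_mask  = _mm256_castsi256_ps(_mm256_set1_epi16(0x7FFF));
      const __m256 sign_mask = _mm256_castsi256_ps(_mm256_set1_epi16((short)0x8000));
      return _mm256_or_ps(_mm256_and_ps(mag, mag_mask),
                          _mm256_and_ps(sgn, sign_mask));
    }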
 ;
 ; X86-AVX2-LABEL: fcopysign_v16f16:
 ; X86-AVX2:       # %bb.0:
-; X86-AVX2-NEXT:    pushl %edi
-; X86-AVX2-NEXT:    pushl %esi
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX2-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX2-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX2-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX2-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX2-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %esi
-; X86-AVX2-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX2-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    movl 28(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    movl 12(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 12(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    movl 24(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 24(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    movl 8(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 8(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX2-NEXT:    movl 20(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 20(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    andl 4(%ecx), %edx
-; X86-AVX2-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX2-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %eax
-; X86-AVX2-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X86-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X86-AVX2-NEXT:    popl %esi
-; X86-AVX2-NEXT:    popl %edi
+; X86-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X86-AVX2-NEXT:    vpand (%ecx), %ymm0, %ymm0
+; X86-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX2-NEXT:    vpand (%eax), %ymm1, %ymm1
+; X86-AVX2-NEXT:    vpor %ymm0, %ymm1, %ymm0
 ; X86-AVX2-NEXT:    retl
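
The AVX2 form broadcasts the two masks with vpbroadcastw instead of loading
them from the constant pool; the asm comments show them as floating-point
values because that is how the 16-bit patterns decode as halves:

    #include <cstdint>

    // The two broadcast constants, as binary16 bit patterns (illustrative names):
    constexpr uint16_t kSignF16 = 0x8000; // half(-0.0E+0): the sign bit alone
    constexpr uint16_t kMagF16  = 0x7FFF; // all-ones exponent+mantissa: a NaN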
 ;
-; X86-AVX512VL-LABEL: fcopysign_v16f16:
-; X86-AVX512VL:       # %bb.0:
-; X86-AVX512VL-NEXT:    pushl %edi
-; X86-AVX512VL-NEXT:    pushl %esi
-; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VL-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX512VL-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VL-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VL-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %esi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX512VL-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    movl 28(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    movl 12(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 12(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    movl 24(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 24(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    movl 8(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 8(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    movl 20(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 20(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    andl 4(%ecx), %edx
-; X86-AVX512VL-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VL-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X86-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X86-AVX512VL-NEXT:    popl %esi
-; X86-AVX512VL-NEXT:    popl %edi
-; X86-AVX512VL-NEXT:    retl
-;
-; X86-AVX512FP16-LABEL: fcopysign_v16f16:
-; X86-AVX512FP16:       # %bb.0:
-; X86-AVX512FP16-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512FP16-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512FP16-NEXT:    vmovdqu (%ecx), %ymm1
-; X86-AVX512FP16-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
-; X86-AVX512FP16-NEXT:    vpternlogq $202, (%eax), %ymm1, %ymm0
-; X86-AVX512FP16-NEXT:    retl
-;
-; X86-AVX512VLDQ-LABEL: fcopysign_v16f16:
-; X86-AVX512VLDQ:       # %bb.0:
-; X86-AVX512VLDQ-NEXT:    pushl %edi
-; X86-AVX512VLDQ-NEXT:    pushl %esi
-; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX512VLDQ-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    movl 28(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    movl 12(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 12(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    movl 24(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 24(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    movl 8(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 8(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    movl 20(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 20(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    andl 4(%ecx), %edx
-; X86-AVX512VLDQ-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VLDQ-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X86-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X86-AVX512VLDQ-NEXT:    popl %esi
-; X86-AVX512VLDQ-NEXT:    popl %edi
-; X86-AVX512VLDQ-NEXT:    retl
+; X86-AVX512-LABEL: fcopysign_v16f16:
+; X86-AVX512:       # %bb.0:
+; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-AVX512-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-AVX512-NEXT:    vmovdqu (%ecx), %ymm1
+; X86-AVX512-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX512-NEXT:    vpternlogq $202, (%eax), %ymm1, %ymm0
+; X86-AVX512-NEXT:    retl
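
On AVX512 the whole and/andn/or triple folds into a single vpternlogq:
immediate 202 (0xCA) is the truth table of the bitwise select A ? B : C,
with A the destination operand (here the broadcast 0x7FFF mask), so
magnitude bits are taken from one source and the sign bit from the other. A
per-bit sketch on 64-bit scalars:

    #include <cstdint>

    // vpternlogq imm8 = 0xCA, emulated: bitwise A ? B : C.
    uint64_t ternlog_ca(uint64_t a, uint64_t b, uint64_t c) {
      return (a & b) | (~a & c);
    }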
 ;
 ; X64-SSE-LABEL: fcopysign_v16f16:
 ; X64-SSE:       # %bb.0:
-; X64-SSE-NEXT:    movdqa (%rdi), %xmm1
-; X64-SSE-NEXT:    movdqa 16(%rdi), %xmm2
-; X64-SSE-NEXT:    movdqa (%rsi), %xmm4
-; X64-SSE-NEXT:    movdqa 16(%rsi), %xmm3
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm4[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm5[0],xmm0[1],xmm5[1],xmm0[2],xmm5[2],xmm0[3],xmm5[3]
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm5
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm5
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm5 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm5
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm4[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm5
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm1[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1]
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    movaps %xmm4, %xmm6
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm6 = xmm6[1,1],xmm4[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm1, %xmm6
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm6 = xmm6[1,1],xmm1[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    psrld $16, %xmm4
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm5[0]
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm3[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm2[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm4
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm4
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm4 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm4
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm3[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm4
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm4 = xmm4[1],xmm2[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm4 = xmm4[0],xmm1[0],xmm4[1],xmm1[1]
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm1
-; X64-SSE-NEXT:    psrlq $48, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    psrlq $48, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    movaps %xmm3, %xmm5
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm3[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm2, %xmm5
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    psrld $16, %xmm3
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm2
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0]
+; X64-SSE-NEXT:    movaps {{.*#+}} xmm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-SSE-NEXT:    movaps %xmm1, %xmm2
+; X64-SSE-NEXT:    andnps (%rsi), %xmm2
+; X64-SSE-NEXT:    movaps (%rdi), %xmm0
+; X64-SSE-NEXT:    andps %xmm1, %xmm0
+; X64-SSE-NEXT:    orps %xmm2, %xmm0
+; X64-SSE-NEXT:    movaps %xmm1, %xmm2
+; X64-SSE-NEXT:    andnps 16(%rsi), %xmm2
+; X64-SSE-NEXT:    andps 16(%rdi), %xmm1
+; X64-SSE-NEXT:    orps %xmm2, %xmm1
 ; X64-SSE-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: fcopysign_v16f16:
 ; X64-AVX1:       # %bb.0:
-; X64-AVX1-NEXT:    vbroadcastss 28(%rsi), %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 28(%rdi), %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX1-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX1-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X64-AVX1-NEXT:    vbroadcastss 24(%rsi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 24(%rdi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
-; X64-AVX1-NEXT:    vbroadcastss 20(%rsi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 20(%rdi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm3, %xmm2
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
-; X64-AVX1-NEXT:    vbroadcastss 12(%rsi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 12(%rdi), %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm3
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-AVX1-NEXT:    vbroadcastss 8(%rsi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 8(%rdi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
-; X64-AVX1-NEXT:    vbroadcastss 4(%rsi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 4(%rdi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
-; X64-AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
+; X64-AVX1-NEXT:    vmovups (%rdi), %ymm0
+; X64-AVX1-NEXT:    vmovups (%rsi), %ymm1
+; X64-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm1
+; X64-AVX1-NEXT:    vandps {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
+; X64-AVX1-NEXT:    vorps %ymm1, %ymm0, %ymm0
 ; X64-AVX1-NEXT:    retq
 ;
 ; X64-AVX2-LABEL: fcopysign_v16f16:
 ; X64-AVX2:       # %bb.0:
-; X64-AVX2-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX2-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX2-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX2-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX2-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm2
-; X64-AVX2-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX2-NEXT:    andl 4(%rsi), %eax
-; X64-AVX2-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X64-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
+; X64-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0,-0.0E+0]
+; X64-AVX2-NEXT:    vpand (%rsi), %ymm0, %ymm0
+; X64-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-AVX2-NEXT:    vpand (%rdi), %ymm1, %ymm1
+; X64-AVX2-NEXT:    vpor %ymm0, %ymm1, %ymm0
 ; X64-AVX2-NEXT:    retq
 ;
-; X64-AVX512VL-LABEL: fcopysign_v16f16:
-; X64-AVX512VL:       # %bb.0:
-; X64-AVX512VL-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX512VL-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VL-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VL-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm2
-; X64-AVX512VL-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VL-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X64-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X64-AVX512VL-NEXT:    retq
-;
-; X64-AVX512FP16-LABEL: fcopysign_v16f16:
-; X64-AVX512FP16:       # %bb.0:
-; X64-AVX512FP16-NEXT:    vmovdqu (%rdi), %ymm1
-; X64-AVX512FP16-NEXT:    vpbroadcastq {{.*#+}} ymm0 = [9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063]
-; X64-AVX512FP16-NEXT:    vpternlogq $202, (%rsi), %ymm1, %ymm0
-; X64-AVX512FP16-NEXT:    retq
-;
-; X64-AVX512VLDQ-LABEL: fcopysign_v16f16:
-; X64-AVX512VLDQ:       # %bb.0:
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX512VLDQ-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VLDQ-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm2
-; X64-AVX512VLDQ-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VLDQ-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X64-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X64-AVX512VLDQ-NEXT:    retq
+; X64-AVX512-LABEL: fcopysign_v16f16:
+; X64-AVX512:       # %bb.0:
+; X64-AVX512-NEXT:    vmovdqu (%rdi), %ymm1
+; X64-AVX512-NEXT:    vpbroadcastq {{.*#+}} ymm0 = [9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063]
+; X64-AVX512-NEXT:    vpternlogq $202, (%rsi), %ymm1, %ymm0
+; X64-AVX512-NEXT:    retq
   %a0 = load <16 x half>, ptr %p0, align 16
   %a1 = load <16 x half>, ptr %p1, align 16
   %t = call <16 x half> @llvm.copysign.v16f16(<16 x half> %a0, <16 x half> %a1)
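(All of the codegen changes in this diff reduce to the same sign-bit selection: the 0x7FFF splat — printed as "NaN" — keeps the magnitude bits, the 0x8000 splat — printed as "-0.0E+0" — keeps the sign bit, and on AVX512 the whole select folds into a single vpternlogq with immediate 202 (0xCA, i.e. mask ? mag : sign) against the i16 0x7FFF splat 9223231297218904063 = 0x7FFF7FFF7FFF7FFF. A minimal scalar sketch of the same expansion in LLVM IR — illustrative only, not part of this patch, and the function name is hypothetical:

define half @copysign_f16_sketch(half %mag, half %sgn) {
  %m = bitcast half %mag to i16
  %s = bitcast half %sgn to i16
  %mabs = and i16 %m, 32767       ; 0x7FFF: keep exponent + mantissa
  %sbit = and i16 %s, -32768      ; 0x8000: keep only the sign bit
  %r = or i16 %mabs, %sbit
  %res = bitcast i16 %r to half
  ret half %res
}

The custom lowering applies this per lane across the whole vector, which is why the scalar extract/insert sequences below disappear in favour of two ANDs and an OR.)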
@@ -2741,1225 +641,64 @@ declare <16 x float> @llvm.copysign.v16f32(<16 x float>, <16 x float>)
 define <32 x half> @fcopysign_v32f16(ptr %p0, ptr %p1) nounwind {
 ; X86-SSE-LABEL: fcopysign_v32f16:
 ; X86-SSE:       # %bb.0:
-; X86-SSE-NEXT:    pushl %esi
-; X86-SSE-NEXT:    subl $32, %esp
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-SSE-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-SSE-NEXT:    movaps 48(%ecx), %xmm0
-; X86-SSE-NEXT:    movups %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-SSE-NEXT:    movdqa 32(%ecx), %xmm4
-; X86-SSE-NEXT:    movdqa (%ecx), %xmm5
-; X86-SSE-NEXT:    movdqa 16(%ecx), %xmm3
-; X86-SSE-NEXT:    movdqa (%eax), %xmm0
-; X86-SSE-NEXT:    movdqa %xmm0, %xmm1
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm1
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm6
-; X86-SSE-NEXT:    movdqa %xmm0, %xmm1
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm0[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm1
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm5[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm1
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm6[0],xmm1[1],xmm6[1],xmm1[2],xmm6[2],xmm1[3],xmm6[3]
-; X86-SSE-NEXT:    movdqa %xmm0, %xmm6
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm6 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm6
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm6 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm7
-; X86-SSE-NEXT:    movdqa %xmm0, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm0[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm5[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm6
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm6 = xmm6[0],xmm1[0],xmm6[1],xmm1[1]
-; X86-SSE-NEXT:    movdqa %xmm0, %xmm1
-; X86-SSE-NEXT:    psrlq $48, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm1
-; X86-SSE-NEXT:    psrlq $48, %xmm1
-; X86-SSE-NEXT:    pextrw $0, %xmm1, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm1
-; X86-SSE-NEXT:    movaps %xmm0, %xmm7
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm7 = xmm7[1,1],xmm0[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm7, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm5, %xmm7
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm7 = xmm7[1,1],xmm5[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm7, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm7
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm1[0],xmm7[1],xmm1[1],xmm7[2],xmm1[2],xmm7[3],xmm1[3]
-; X86-SSE-NEXT:    movdqa 16(%eax), %xmm2
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %ecx
-; X86-SSE-NEXT:    psrld $16, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm5
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm6[0]
-; X86-SSE-NEXT:    movdqu %xmm1, (%esp) # 16-byte Spill
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[3,3],xmm2[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[3,3],xmm3[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm5
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm2[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm3[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm6
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm6 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
-; X86-SSE-NEXT:    movdqa %xmm2, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm3, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm0
-; X86-SSE-NEXT:    movaps %xmm2, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm2[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm3, %xmm5
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm5 = xmm5[1,1],xmm3[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm7
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
-; X86-SSE-NEXT:    movdqa 32(%eax), %xmm5
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %edx
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %ecx
-; X86-SSE-NEXT:    psrld $16, %xmm2
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %edx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm3
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm6[0]
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm2
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm2 = xmm2[3,3],xmm5[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm2
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm2 = xmm2[3,3],xmm4[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm2, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm2
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm5[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm6
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm4[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %edx
-; X86-SSE-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %edx
-; X86-SSE-NEXT:    pinsrw $0, %edx, %xmm7
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
-; X86-SSE-NEXT:    movdqa %xmm5, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %ecx
-; X86-SSE-NEXT:    movaps %xmm5, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[1,1],xmm5[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %edx
-; X86-SSE-NEXT:    movaps %xmm4, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %esi
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X86-SSE-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-SSE-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %edx, %esi
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
-; X86-SSE-NEXT:    pinsrw $0, %esi, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
-; X86-SSE-NEXT:    movdqa 48(%eax), %xmm6
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %eax
-; X86-SSE-NEXT:    psrld $16, %xmm5
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %eax, %xmm2
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm4
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %eax
-; X86-SSE-NEXT:    pinsrw $0, %eax, %xmm3
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm2 = xmm2[0],xmm0[0],xmm2[1],xmm0[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm7[0]
-; X86-SSE-NEXT:    movdqa %xmm6, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm4 # 16-byte Reload
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm6, %xmm3
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm3 = xmm3[3,3],xmm6[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm3
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm3 = xmm3[3,3],xmm4[3,3]
-; X86-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; X86-SSE-NEXT:    movdqa %xmm6, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    movdqa %xmm6, %xmm5
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm6[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm5
-; X86-SSE-NEXT:    punpckhqdq {{.*#+}} xmm5 = xmm5[1],xmm4[1]
-; X86-SSE-NEXT:    pextrw $0, %xmm5, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm5
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm5 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm5 = xmm5[0],xmm3[0],xmm5[1],xmm3[1]
-; X86-SSE-NEXT:    movdqa %xmm6, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movdqa %xmm4, %xmm0
-; X86-SSE-NEXT:    psrlq $48, %xmm0
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X86-SSE-NEXT:    movaps %xmm6, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[1,1],xmm6[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X86-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-SSE-NEXT:    movaps %xmm4, %xmm0
-; X86-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[1,1],xmm4[1,1]
-; X86-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X86-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %eax, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %eax
-; X86-SSE-NEXT:    psrld $16, %xmm6
-; X86-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X86-SSE-NEXT:    pinsrw $0, %eax, %xmm3
-; X86-SSE-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-SSE-NEXT:    psrld $16, %xmm4
-; X86-SSE-NEXT:    pextrw $0, %xmm4, %eax
-; X86-SSE-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-SSE-NEXT:    orl %ecx, %eax
-; X86-SSE-NEXT:    pinsrw $0, %eax, %xmm4
-; X86-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-SSE-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1]
-; X86-SSE-NEXT:    punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm5[0]
-; X86-SSE-NEXT:    movups (%esp), %xmm0 # 16-byte Reload
-; X86-SSE-NEXT:    addl $32, %esp
-; X86-SSE-NEXT:    popl %esi
+; X86-SSE-NEXT:    movaps {{.*#+}} xmm3 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-SSE-NEXT:    movaps %xmm3, %xmm1
+; X86-SSE-NEXT:    andnps (%ecx), %xmm1
+; X86-SSE-NEXT:    movaps (%eax), %xmm0
+; X86-SSE-NEXT:    andps %xmm3, %xmm0
+; X86-SSE-NEXT:    orps %xmm1, %xmm0
+; X86-SSE-NEXT:    movaps %xmm3, %xmm2
+; X86-SSE-NEXT:    andnps 16(%ecx), %xmm2
+; X86-SSE-NEXT:    movaps 16(%eax), %xmm1
+; X86-SSE-NEXT:    andps %xmm3, %xmm1
+; X86-SSE-NEXT:    orps %xmm2, %xmm1
+; X86-SSE-NEXT:    movaps %xmm3, %xmm4
+; X86-SSE-NEXT:    andnps 32(%ecx), %xmm4
+; X86-SSE-NEXT:    movaps 32(%eax), %xmm2
+; X86-SSE-NEXT:    andps %xmm3, %xmm2
+; X86-SSE-NEXT:    orps %xmm4, %xmm2
+; X86-SSE-NEXT:    movaps %xmm3, %xmm4
+; X86-SSE-NEXT:    andnps 48(%ecx), %xmm4
+; X86-SSE-NEXT:    andps 48(%eax), %xmm3
+; X86-SSE-NEXT:    orps %xmm4, %xmm3
 ; X86-SSE-NEXT:    retl
 ;
 ; X86-AVX1-LABEL: fcopysign_v32f16:
 ; X86-AVX1:       # %bb.0:
-; X86-AVX1-NEXT:    pushl %esi
 ; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX1-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX1-NEXT:    vbroadcastss 28(%ecx), %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 28(%eax), %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX1-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX1-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vbroadcastss 24(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 24(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
-; X86-AVX1-NEXT:    vbroadcastss 20(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 20(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm3, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
-; X86-AVX1-NEXT:    vbroadcastss 12(%ecx), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 12(%eax), %xmm3
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX1-NEXT:    vbroadcastss 8(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 8(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
-; X86-AVX1-NEXT:    vbroadcastss 4(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 4(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm0
-; X86-AVX1-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm4[0],xmm0[1],xmm4[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm3[0]
-; X86-AVX1-NEXT:    vinsertf128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX1-NEXT:    vbroadcastss 60(%ecx), %xmm1
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 60(%eax), %xmm1
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm3
-; X86-AVX1-NEXT:    vmovdqa 48(%ecx), %xmm1
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vmovdqa 48(%eax), %xmm2
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X86-AVX1-NEXT:    vbroadcastss 56(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 56(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1]
-; X86-AVX1-NEXT:    vbroadcastss 52(%ecx), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 52(%eax), %xmm4
-; X86-AVX1-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm1
-; X86-AVX1-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm3[0]
-; X86-AVX1-NEXT:    vbroadcastss 44(%ecx), %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 44(%eax), %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX1-NEXT:    vmovdqa 32(%ecx), %xmm2
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vmovdqa 32(%eax), %xmm3
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X86-AVX1-NEXT:    vbroadcastss 40(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 40(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %edx
-; X86-AVX1-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX1-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %edx, %esi
-; X86-AVX1-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
-; X86-AVX1-NEXT:    vbroadcastss 36(%ecx), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X86-AVX1-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X86-AVX1-NEXT:    vbroadcastss 36(%eax), %xmm5
-; X86-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X86-AVX1-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %ecx, %eax
-; X86-AVX1-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm5
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrlq $48, %xmm3, %xmm6
-; X86-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X86-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X86-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X86-AVX1-NEXT:    vpsrld $16, %xmm3, %xmm2
-; X86-AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
-; X86-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X86-AVX1-NEXT:    orl %eax, %ecx
-; X86-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X86-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X86-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm2 = xmm2[0],xmm5[0],xmm2[1],xmm5[1]
-; X86-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm4[0]
-; X86-AVX1-NEXT:    vinsertf128 $1, %xmm1, %ymm2, %ymm1
-; X86-AVX1-NEXT:    popl %esi
+; X86-AVX1-NEXT:    vbroadcastss {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX1-NEXT:    vandnps (%ecx), %ymm1, %ymm0
+; X86-AVX1-NEXT:    vandps (%eax), %ymm1, %ymm2
+; X86-AVX1-NEXT:    vorps %ymm0, %ymm2, %ymm0
+; X86-AVX1-NEXT:    vandnps 32(%ecx), %ymm1, %ymm2
+; X86-AVX1-NEXT:    vandps 32(%eax), %ymm1, %ymm1
+; X86-AVX1-NEXT:    vorps %ymm2, %ymm1, %ymm1
 ; X86-AVX1-NEXT:    retl
 ;
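The removed X86-AVX1 lines above extract each half element into a GPR, mask it
with 0x8000/0x7FFF, and reassemble the vector lane by lane; the replacement
applies the same two masks to a whole ymm at once with FP logic ops
(vandps/vandnps/vorps, AVX1 having no 256-bit integer logic). The 0x7FFF mask
prints as NaN in the vbroadcastss comment because that bit pattern is a
binary16 NaN. As a hypothetical scalar reference for the per-element semantics
both forms implement (not code from this patch):

    // IEEE-754 binary16: bit 15 is the sign, bits 14..0 are exponent+mantissa.
    #include <cstdint>

    uint16_t copysign_f16_bits(uint16_t mag, uint16_t sgn) {
      // Keep everything but the sign from mag, take only the sign from sgn.
      return (mag & 0x7FFFu) | (sgn & 0x8000u);
    }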
 ; X86-AVX2-LABEL: fcopysign_v32f16:
 ; X86-AVX2:       # %bb.0:
-; X86-AVX2-NEXT:    pushl %ebx
-; X86-AVX2-NEXT:    pushl %edi
-; X86-AVX2-NEXT:    pushl %esi
-; X86-AVX2-NEXT:    subl $32, %esp
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX2-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX2-NEXT:    vmovdqa (%ecx), %xmm0
-; X86-AVX2-NEXT:    vmovdqa 16(%ecx), %xmm2
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %edx
-; X86-AVX2-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    vmovdqa (%eax), %xmm1
-; X86-AVX2-NEXT:    vmovdqa 16(%eax), %xmm3
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX2-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %esi
-; X86-AVX2-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm4
-; X86-AVX2-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX2-NEXT:    movl 28(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    movl 12(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 12(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm5, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    movl 24(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 24(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    movl 8(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 8(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm4 = ymm5[0],ymm4[0],ymm5[1],ymm4[1],ymm5[4],ymm4[4],ymm5[5],ymm4[5]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    vpsrld $16, %xmm2, %xmm6
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm3, %xmm6
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm7
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX2-NEXT:    movl 20(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 20(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    movl 4(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 4(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X86-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm4[0],ymm0[2],ymm4[2]
-; X86-AVX2-NEXT:    vmovdqu %ymm0, (%esp) # 32-byte Spill
-; X86-AVX2-NEXT:    vmovdqa 48(%ecx), %xmm1
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vmovdqa 48(%eax), %xmm2
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm3 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX2-NEXT:    movl 60(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 60(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm4
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X86-AVX2-NEXT:    vmovdqa 32(%ecx), %xmm3
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm4 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vmovdqa 32(%eax), %xmm4
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    movl 44(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 44(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm0
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm6, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    movl 56(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 56(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX2-NEXT:    vpextrw $0, %xmm7, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX2-NEXT:    movl 40(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 40(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm5
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm7[0],xmm5[1],xmm7[1],xmm5[2],xmm7[2],xmm5[3],xmm7[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm6, %ymm5, %ymm5
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm5 = ymm5[0],ymm0[0],ymm5[1],ymm0[1],ymm5[4],ymm0[4],ymm5[5],ymm0[5]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrld $16, %xmm2, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; X86-AVX2-NEXT:    vpextrw $0, %xmm3, %edi
-; X86-AVX2-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX2-NEXT:    vpextrw $0, %xmm4, %esi
-; X86-AVX2-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edi, %esi
-; X86-AVX2-NEXT:    vpsrld $16, %xmm3, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    vpsrld $16, %xmm4, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %ebx
-; X86-AVX2-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX2-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edi, %ebx
-; X86-AVX2-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm7
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm7[0],xmm0[1],xmm7[1],xmm0[2],xmm7[2],xmm0[3],xmm7[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm6, %ymm0, %ymm6
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm2, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    movl 52(%ecx), %esi
-; X86-AVX2-NEXT:    andl %edx, %esi
-; X86-AVX2-NEXT:    movzwl 52(%eax), %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm3, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX2-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX2-NEXT:    vpsrlq $48, %xmm4, %xmm0
-; X86-AVX2-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX2-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %esi, %edi
-; X86-AVX2-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX2-NEXT:    andl 36(%ecx), %edx
-; X86-AVX2-NEXT:    movzwl 36(%eax), %eax
-; X86-AVX2-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX2-NEXT:    orl %edx, %eax
-; X86-AVX2-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm2
-; X86-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-AVX2-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
-; X86-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm6[0],ymm0[0],ymm6[1],ymm0[1],ymm6[4],ymm0[4],ymm6[5],ymm0[5]
-; X86-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm1 = ymm0[0],ymm5[0],ymm0[2],ymm5[2]
-; X86-AVX2-NEXT:    vmovups (%esp), %ymm0 # 32-byte Reload
-; X86-AVX2-NEXT:    addl $32, %esp
-; X86-AVX2-NEXT:    popl %esi
-; X86-AVX2-NEXT:    popl %edi
-; X86-AVX2-NEXT:    popl %ebx
+; X86-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX2-NEXT:    vpandn (%ecx), %ymm1, %ymm0
+; X86-AVX2-NEXT:    vpand (%eax), %ymm1, %ymm2
+; X86-AVX2-NEXT:    vpor %ymm0, %ymm2, %ymm0
+; X86-AVX2-NEXT:    vpandn 32(%ecx), %ymm1, %ymm2
+; X86-AVX2-NEXT:    vpand 32(%eax), %ymm1, %ymm1
+; X86-AVX2-NEXT:    vpor %ymm2, %ymm1, %ymm1
 ; X86-AVX2-NEXT:    retl
 ;
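The new X86-AVX2 sequence is the same bit-select done with 256-bit integer ops
(vpbroadcastw/vpandn/vpand/vpor). A minimal intrinsics sketch of that idiom,
one ymm (16 halves) at a time; the function and parameter names are ours, not
from the test:

    #include <immintrin.h>

    // Copy the sign of each half in *sgn onto the magnitude of each half in *mag.
    __m256i copysign_v16f16(const void *mag, const void *sgn) {
      const __m256i abs_mask = _mm256_set1_epi16(0x7FFF);                      // vpbroadcastw
      __m256i m = _mm256_and_si256(abs_mask,
                                   _mm256_loadu_si256((const __m256i *)mag));  // vpand
      __m256i s = _mm256_andnot_si256(abs_mask,
                                      _mm256_loadu_si256((const __m256i *)sgn)); // vpandn
      return _mm256_or_si256(m, s);                                            // vpor
    }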
 ; X86-AVX512VL-LABEL: fcopysign_v32f16:
 ; X86-AVX512VL:       # %bb.0:
-; X86-AVX512VL-NEXT:    pushl %ebp
-; X86-AVX512VL-NEXT:    pushl %ebx
-; X86-AVX512VL-NEXT:    pushl %edi
-; X86-AVX512VL-NEXT:    pushl %esi
-; X86-AVX512VL-NEXT:    subl $400, %esp # imm = 0x190
 ; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX512VL-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VL-NEXT:    vmovdqa 48(%ecx), %xmm0
-; X86-AVX512VL-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX512VL-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vmovdqa 48(%eax), %xmm0
-; X86-AVX512VL-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %esi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VL-NEXT:    movl 60(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 60(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vmovdqa 32(%ecx), %xmm1
-; X86-AVX512VL-NEXT:    vmovdqu %xmm1, (%esp) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vmovdqa 32(%eax), %xmm1
-; X86-AVX512VL-NEXT:    vmovdqu %xmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    movl 44(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 44(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VL-NEXT:    vmovdqa 16(%ecx), %xmm5
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    vmovdqa 16(%eax), %xmm0
-; X86-AVX512VL-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    movl 28(%ecx), %esi
-; X86-AVX512VL-NEXT:    andl %edx, %esi
-; X86-AVX512VL-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vmovdqa (%ecx), %xmm4
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vmovdqu %xmm4, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vmovdqa (%eax), %xmm2
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 12(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 12(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm6 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm7 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 56(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 56(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vmovdqu (%esp), %xmm1 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 40(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 40(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 24(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 24(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 8(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 8(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm6, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm7, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ebx
-; X86-AVX512VL-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %ebp
-; X86-AVX512VL-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %ebx, %ebp
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vmovdqu (%esp), %xmm3 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm3, %ebx
-; X86-AVX512VL-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ebp
-; X86-AVX512VL-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %ebx, %ebp
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm5, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm6 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm6, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm5, %ebx
-; X86-AVX512VL-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ebp
-; X86-AVX512VL-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %ebx, %ebp
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm7 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm7, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm4 # 16-byte Reload
-; X86-AVX512VL-NEXT:    vpsrld $16, %xmm4, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm7, %ebx
-; X86-AVX512VL-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm4, %ebp
-; X86-AVX512VL-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %ebx, %ebp
-; X86-AVX512VL-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %esi, %edi
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VL-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Folded Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Folded Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 52(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 52(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vpsrlq $48, (%esp), %xmm1 # 16-byte Folded Reload
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm1
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 36(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 36(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm1
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm5, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm6, %xmm0
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    movl 20(%ecx), %edi
-; X86-AVX512VL-NEXT:    andl %edx, %edi
-; X86-AVX512VL-NEXT:    movzwl 20(%eax), %ebx
-; X86-AVX512VL-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %ebx
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VL-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm7, %xmm2
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VL-NEXT:    vpsrlq $48, %xmm4, %xmm2
-; X86-AVX512VL-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VL-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VL-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edi, %esi
-; X86-AVX512VL-NEXT:    andl 4(%ecx), %edx
-; X86-AVX512VL-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VL-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VL-NEXT:    orl %edx, %eax
-; X86-AVX512VL-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX512VL-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X86-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VL-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; X86-AVX512VL-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm2 # 64-byte Reload
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm2, %zmm2 # 32-byte Folded Reload
-; X86-AVX512VL-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm3 # 64-byte Reload
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm2 = zmm3[0],zmm2[0],zmm3[1],zmm2[1],zmm3[4],zmm2[4],zmm3[5],zmm2[5],zmm3[8],zmm2[8],zmm3[9],zmm2[9],zmm3[12],zmm2[12],zmm3[13],zmm2[13]
-; X86-AVX512VL-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm3 # 64-byte Reload
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
-; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; X86-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm3[0],zmm0[0],zmm3[1],zmm0[1],zmm3[4],zmm0[4],zmm3[5],zmm0[5],zmm3[8],zmm0[8],zmm3[9],zmm0[9],zmm3[12],zmm0[12],zmm3[13],zmm0[13]
-; X86-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm2[0],zmm0[2],zmm2[2],zmm0[4],zmm2[4],zmm0[6],zmm2[6]
-; X86-AVX512VL-NEXT:    addl $400, %esp # imm = 0x190
-; X86-AVX512VL-NEXT:    popl %esi
-; X86-AVX512VL-NEXT:    popl %edi
-; X86-AVX512VL-NEXT:    popl %ebx
-; X86-AVX512VL-NEXT:    popl %ebp
+; X86-AVX512VL-NEXT:    vmovdqu64 (%ecx), %zmm1
+; X86-AVX512VL-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX512VL-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X86-AVX512VL-NEXT:    vpternlogq $202, (%eax), %zmm1, %zmm0
 ; X86-AVX512VL-NEXT:    retl
 ;
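On the AVX512VL target the whole select collapses into a single vpternlogq:
imm8 202 (0xCA) evaluates, bit by bit, A ? B : C, i.e. (A & B) | (~A & C),
with the broadcast 0x7FFF mask as the selector A. A sketch of the idiom,
assuming AVX512BW for the zmm broadcast (this run has no AVX512BW, so the
check lines above instead build the mask with a ymm vpbroadcastw plus
vinserti64x4):

    #include <immintrin.h>

    __m512i copysign_v32f16(const void *mag, const void *sgn) {
      const __m512i abs_mask = _mm512_set1_epi16(0x7FFF); // selector A
      __m512i m = _mm512_loadu_si512(mag);                // B: magnitude source
      __m512i s = _mm512_loadu_si512(sgn);                // C: sign source
      // Per bit: (A & B) | (~A & C) -> non-sign bits from m, sign bit from s.
      return _mm512_ternarylogic_epi64(abs_mask, m, s, 0xCA);
    }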
 ; X86-AVX512FP16-LABEL: fcopysign_v32f16:
@@ -3973,1741 +712,66 @@ define <32 x half> @fcopysign_v32f16(ptr %p0, ptr %p1) nounwind {
 ;
 ; X86-AVX512VLDQ-LABEL: fcopysign_v32f16:
 ; X86-AVX512VLDQ:       # %bb.0:
-; X86-AVX512VLDQ-NEXT:    pushl %ebp
-; X86-AVX512VLDQ-NEXT:    pushl %ebx
-; X86-AVX512VLDQ-NEXT:    pushl %edi
-; X86-AVX512VLDQ-NEXT:    pushl %esi
-; X86-AVX512VLDQ-NEXT:    subl $400, %esp # imm = 0x190
 ; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %eax
 ; X86-AVX512VLDQ-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X86-AVX512VLDQ-NEXT:    vmovdqa 48(%ecx), %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vmovdqa 48(%eax), %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    movl $-32768, %edx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    movl 60(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 60(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqa 32(%ecx), %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm1, (%esp) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vmovdqa 32(%eax), %xmm1
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm1, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    movl 44(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 44(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm6
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm2[0],xmm6[1],xmm2[1],xmm6[2],xmm2[2],xmm6[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VLDQ-NEXT:    vmovdqa 16(%ecx), %xmm5
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vmovdqa 16(%eax), %xmm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    movl 28(%ecx), %esi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %esi
-; X86-AVX512VLDQ-NEXT:    movzwl 28(%eax), %edi
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm7
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm7[0],xmm0[0],xmm7[1],xmm0[1],xmm7[2],xmm0[2],xmm7[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%ecx), %xmm4
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm4, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vmovdqa (%eax), %xmm2
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vmovdqu %xmm2, {{[-0-9]+}}(%e{{[sb]}}p) # 16-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 12(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 12(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm6 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm7 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 56(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 56(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqu (%esp), %xmm1 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm1 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 40(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 40(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm0 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 24(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 24(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 8(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 8(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm6, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm7, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ebx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %ebx, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqu (%esp), %xmm3 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm2 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %ebx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %ebx, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%e{{[sb]}}p) # 32-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm5, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm6 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm6, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %ebx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %ebx, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm7 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm7, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    vmovdqu {{[-0-9]+}}(%e{{[sb]}}p), %xmm4 # 16-byte Reload
-; X86-AVX512VLDQ-NEXT:    vpsrld $16, %xmm4, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %ebx
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %ebx # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebp # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %ebx, %ebp
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %esi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %edi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %esi, %edi
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebp, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %edi, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm3[0],xmm1[1],xmm3[1],xmm1[2],xmm3[2],xmm1[3],xmm3[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%e{{[sb]}}p) # 64-byte Spill
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, {{[-0-9]+}}(%e{{[sb]}}p), %xmm0 # 16-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 52(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 52(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, (%esp), %xmm1 # 16-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edi
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 36(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 36(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm1
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm1
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm5, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edi
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm6, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    movl 20(%ecx), %edi
-; X86-AVX512VLDQ-NEXT:    andl %edx, %edi
-; X86-AVX512VLDQ-NEXT:    movzwl 20(%eax), %ebx
-; X86-AVX512VLDQ-NEXT:    andl $32767, %ebx # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %ebx
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm0
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %ebx, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm7, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edi
-; X86-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm4, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %esi
-; X86-AVX512VLDQ-NEXT:    andl $-32768, %edi # imm = 0x8000
-; X86-AVX512VLDQ-NEXT:    andl $32767, %esi # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edi, %esi
-; X86-AVX512VLDQ-NEXT:    andl 4(%ecx), %edx
-; X86-AVX512VLDQ-NEXT:    movzwl 4(%eax), %eax
-; X86-AVX512VLDQ-NEXT:    andl $32767, %eax # imm = 0x7FFF
-; X86-AVX512VLDQ-NEXT:    orl %edx, %eax
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %esi, %xmm0, %xmm2
-; X86-AVX512VLDQ-NEXT:    vpinsrw $0, %eax, %xmm0, %xmm3
-; X86-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X86-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm2 # 64-byte Reload
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm2, %zmm2 # 32-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm3 # 64-byte Reload
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm2 = zmm3[0],zmm2[0],zmm3[1],zmm2[1],zmm3[4],zmm2[4],zmm3[5],zmm2[5],zmm3[8],zmm2[8],zmm3[9],zmm2[9],zmm3[12],zmm2[12],zmm3[13],zmm2[13]
-; X86-AVX512VLDQ-NEXT:    vmovdqu64 {{[-0-9]+}}(%e{{[sb]}}p), %zmm3 # 64-byte Reload
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%e{{[sb]}}p), %zmm3, %zmm3 # 32-byte Folded Reload
-; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; X86-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm3[0],zmm0[0],zmm3[1],zmm0[1],zmm3[4],zmm0[4],zmm3[5],zmm0[5],zmm3[8],zmm0[8],zmm3[9],zmm0[9],zmm3[12],zmm0[12],zmm3[13],zmm0[13]
-; X86-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm2[0],zmm0[2],zmm2[2],zmm0[4],zmm2[4],zmm0[6],zmm2[6]
-; X86-AVX512VLDQ-NEXT:    addl $400, %esp # imm = 0x190
-; X86-AVX512VLDQ-NEXT:    popl %esi
-; X86-AVX512VLDQ-NEXT:    popl %edi
-; X86-AVX512VLDQ-NEXT:    popl %ebx
-; X86-AVX512VLDQ-NEXT:    popl %ebp
+; X86-AVX512VLDQ-NEXT:    vmovdqu64 (%ecx), %zmm1
+; X86-AVX512VLDQ-NEXT:    vpbroadcastw {{.*#+}} ymm0 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X86-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; X86-AVX512VLDQ-NEXT:    vpternlogq $202, (%eax), %zmm1, %zmm0
 ; X86-AVX512VLDQ-NEXT:    retl
 ;
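A quick sanity check of the scalar reference from earlier, with hypothetical
inputs (binary16 encodings: 0x3E00 is 1.5, 0xC000 is -2.0):

    #include <cstdint>
    #include <cstdio>

    // Assumes the copysign_f16_bits sketch defined earlier in this message.
    uint16_t copysign_f16_bits(uint16_t mag, uint16_t sgn);

    int main() {
      uint16_t r = copysign_f16_bits(0x3E00, 0xC000);
      std::printf("0x%04X\n", r); // expected: 0xBE00, i.e. -1.5
      return 0;
    }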
 ; X64-SSE-LABEL: fcopysign_v32f16:
 ; X64-SSE:       # %bb.0:
-; X64-SSE-NEXT:    movdqa (%rdi), %xmm1
-; X64-SSE-NEXT:    movdqa 16(%rdi), %xmm2
-; X64-SSE-NEXT:    movdqa 32(%rdi), %xmm3
-; X64-SSE-NEXT:    movdqa 48(%rdi), %xmm4
-; X64-SSE-NEXT:    movdqa (%rsi), %xmm8
-; X64-SSE-NEXT:    movdqa 16(%rsi), %xmm7
-; X64-SSE-NEXT:    movdqa 32(%rsi), %xmm6
-; X64-SSE-NEXT:    movdqa 48(%rsi), %xmm5
-; X64-SSE-NEXT:    movdqa %xmm8, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm0 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm9
-; X64-SSE-NEXT:    movdqa %xmm8, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm8[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm0 = xmm0[3,3],xmm1[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
-; X64-SSE-NEXT:    movdqa %xmm8, %xmm9
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm9 = xmm9[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm9
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm9 = xmm9[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm10
-; X64-SSE-NEXT:    movdqa %xmm8, %xmm9
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm9 = xmm9[1],xmm8[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm9
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm9 = xmm9[1],xmm1[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm9
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm9 = xmm9[0],xmm0[0],xmm9[1],xmm0[1]
-; X64-SSE-NEXT:    movdqa %xmm8, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm1, %xmm0
-; X64-SSE-NEXT:    psrlq $48, %xmm0
-; X64-SSE-NEXT:    pextrw $0, %xmm0, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    movaps %xmm8, %xmm10
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm10 = xmm10[1,1],xmm8[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm10, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm1, %xmm10
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm10 = xmm10[1,1],xmm1[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm10, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm10
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm10 = xmm10[0],xmm0[0],xmm10[1],xmm0[1],xmm10[2],xmm0[2],xmm10[3],xmm0[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm0
-; X64-SSE-NEXT:    psrld $16, %xmm8
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm0 = xmm0[0],xmm10[0],xmm0[1],xmm10[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm9[0]
-; X64-SSE-NEXT:    movdqa %xmm7, %xmm1
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm1 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm8
-; X64-SSE-NEXT:    movdqa %xmm7, %xmm1
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm7[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm1 = xmm1[3,3],xmm2[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm8[0],xmm1[1],xmm8[1],xmm1[2],xmm8[2],xmm1[3],xmm8[3]
-; X64-SSE-NEXT:    movdqa %xmm7, %xmm8
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm8 = xmm8[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm8
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm8 = xmm8[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm9
-; X64-SSE-NEXT:    movdqa %xmm7, %xmm8
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm8 = xmm8[1],xmm7[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm8
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm8 = xmm8[1],xmm2[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm8
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm8 = xmm8[0],xmm1[0],xmm8[1],xmm1[1]
-; X64-SSE-NEXT:    movdqa %xmm7, %xmm1
-; X64-SSE-NEXT:    psrlq $48, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm2, %xmm1
-; X64-SSE-NEXT:    psrlq $48, %xmm1
-; X64-SSE-NEXT:    pextrw $0, %xmm1, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    movaps %xmm7, %xmm9
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm9 = xmm9[1,1],xmm7[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm2, %xmm9
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm9 = xmm9[1,1],xmm2[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm9, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm9
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm9 = xmm9[0],xmm1[0],xmm9[1],xmm1[1],xmm9[2],xmm1[2],xmm9[3],xmm1[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm1
-; X64-SSE-NEXT:    psrld $16, %xmm7
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm2
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm1 = xmm1[0],xmm9[0],xmm1[1],xmm9[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm8[0]
-; X64-SSE-NEXT:    movdqa %xmm6, %xmm2
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm2
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm2 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm7
-; X64-SSE-NEXT:    movdqa %xmm6, %xmm2
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm2 = xmm2[3,3],xmm6[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm2
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm2 = xmm2[3,3],xmm3[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
-; X64-SSE-NEXT:    movdqa %xmm6, %xmm7
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm7 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm7
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm7 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm8
-; X64-SSE-NEXT:    movdqa %xmm6, %xmm7
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm7 = xmm7[1],xmm6[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm7
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm7 = xmm7[1],xmm3[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm7
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm7 = xmm7[0],xmm2[0],xmm7[1],xmm2[1]
-; X64-SSE-NEXT:    movdqa %xmm6, %xmm2
-; X64-SSE-NEXT:    psrlq $48, %xmm2
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm3, %xmm2
-; X64-SSE-NEXT:    psrlq $48, %xmm2
-; X64-SSE-NEXT:    pextrw $0, %xmm2, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X64-SSE-NEXT:    movaps %xmm6, %xmm8
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm8 = xmm8[1,1],xmm6[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm3, %xmm8
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm8 = xmm8[1,1],xmm3[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm8, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm8
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm8 = xmm8[0],xmm2[0],xmm8[1],xmm2[1],xmm8[2],xmm2[2],xmm8[3],xmm2[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm2
-; X64-SSE-NEXT:    psrld $16, %xmm6
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm3
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm2 = xmm2[0],xmm3[0],xmm2[1],xmm3[1],xmm2[2],xmm3[2],xmm2[3],xmm3[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm2 = xmm2[0],xmm8[0],xmm2[1],xmm8[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm7[0]
-; X64-SSE-NEXT:    movdqa %xmm5, %xmm3
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm3
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm3 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X64-SSE-NEXT:    movdqa %xmm5, %xmm3
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm3 = xmm3[3,3],xmm5[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm3
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm3 = xmm3[3,3],xmm4[3,3]
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1],xmm3[2],xmm6[2],xmm3[3],xmm6[3]
-; X64-SSE-NEXT:    movdqa %xmm5, %xmm6
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm6 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm6
-; X64-SSE-NEXT:    psrldq {{.*#+}} xmm6 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm7
-; X64-SSE-NEXT:    movdqa %xmm5, %xmm6
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm5[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm6
-; X64-SSE-NEXT:    punpckhqdq {{.*#+}} xmm6 = xmm6[1],xmm4[1]
-; X64-SSE-NEXT:    pextrw $0, %xmm6, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm6
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm6 = xmm6[0],xmm3[0],xmm6[1],xmm3[1]
-; X64-SSE-NEXT:    movdqa %xmm5, %xmm3
-; X64-SSE-NEXT:    psrlq $48, %xmm3
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movdqa %xmm4, %xmm3
-; X64-SSE-NEXT:    psrlq $48, %xmm3
-; X64-SSE-NEXT:    pextrw $0, %xmm3, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    movaps %xmm5, %xmm7
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm7 = xmm7[1,1],xmm5[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    movaps %xmm4, %xmm7
-; X64-SSE-NEXT:    shufps {{.*#+}} xmm7 = xmm7[1,1],xmm4[1,1]
-; X64-SSE-NEXT:    pextrw $0, %xmm7, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm7
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm7 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm3
-; X64-SSE-NEXT:    psrld $16, %xmm5
-; X64-SSE-NEXT:    pextrw $0, %xmm5, %eax
-; X64-SSE-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-SSE-NEXT:    psrld $16, %xmm4
-; X64-SSE-NEXT:    pextrw $0, %xmm4, %ecx
-; X64-SSE-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-SSE-NEXT:    orl %eax, %ecx
-; X64-SSE-NEXT:    pinsrw $0, %ecx, %xmm4
-; X64-SSE-NEXT:    punpcklwd {{.*#+}} xmm3 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; X64-SSE-NEXT:    punpckldq {{.*#+}} xmm3 = xmm3[0],xmm7[0],xmm3[1],xmm7[1]
-; X64-SSE-NEXT:    punpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm6[0]
+; X64-SSE-NEXT:    movaps {{.*#+}} xmm3 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-SSE-NEXT:    movaps %xmm3, %xmm1
+; X64-SSE-NEXT:    andnps (%rsi), %xmm1
+; X64-SSE-NEXT:    movaps (%rdi), %xmm0
+; X64-SSE-NEXT:    andps %xmm3, %xmm0
+; X64-SSE-NEXT:    orps %xmm1, %xmm0
+; X64-SSE-NEXT:    movaps %xmm3, %xmm2
+; X64-SSE-NEXT:    andnps 16(%rsi), %xmm2
+; X64-SSE-NEXT:    movaps 16(%rdi), %xmm1
+; X64-SSE-NEXT:    andps %xmm3, %xmm1
+; X64-SSE-NEXT:    orps %xmm2, %xmm1
+; X64-SSE-NEXT:    movaps %xmm3, %xmm4
+; X64-SSE-NEXT:    andnps 32(%rsi), %xmm4
+; X64-SSE-NEXT:    movaps 32(%rdi), %xmm2
+; X64-SSE-NEXT:    andps %xmm3, %xmm2
+; X64-SSE-NEXT:    orps %xmm4, %xmm2
+; X64-SSE-NEXT:    movaps %xmm3, %xmm4
+; X64-SSE-NEXT:    andnps 48(%rsi), %xmm4
+; X64-SSE-NEXT:    andps 48(%rdi), %xmm3
+; X64-SSE-NEXT:    orps %xmm4, %xmm3
 ; X64-SSE-NEXT:    retq
 ;
 ; X64-AVX1-LABEL: fcopysign_v32f16:
 ; X64-AVX1:       # %bb.0:
-; X64-AVX1-NEXT:    vbroadcastss 28(%rsi), %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 28(%rdi), %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX1-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX1-NEXT:    vmovdqa 16(%rsi), %xmm6
-; X64-AVX1-NEXT:    vmovdqa 32(%rsi), %xmm1
-; X64-AVX1-NEXT:    vmovdqa 48(%rsi), %xmm3
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm6[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vmovdqa (%rdi), %xmm5
-; X64-AVX1-NEXT:    vmovdqa 16(%rdi), %xmm7
-; X64-AVX1-NEXT:    vmovdqa 32(%rdi), %xmm2
-; X64-AVX1-NEXT:    vmovdqa 48(%rdi), %xmm4
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm7[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX1-NEXT:    vbroadcastss 24(%rsi), %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 24(%rdi), %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm10, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm10
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1]
-; X64-AVX1-NEXT:    vbroadcastss 20(%rsi), %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 20(%rdi), %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm6, %xmm10
-; X64-AVX1-NEXT:    vpextrw $0, %xmm10, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm7, %xmm10
-; X64-AVX1-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm10
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm10
-; X64-AVX1-NEXT:    vpsrld $16, %xmm6, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm7, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm6[0],xmm10[1],xmm6[1],xmm10[2],xmm6[2],xmm10[3],xmm6[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm6 = xmm6[0],xmm9[0],xmm6[1],xmm9[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm8[0]
-; X64-AVX1-NEXT:    vbroadcastss 12(%rsi), %xmm7
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 12(%rdi), %xmm7
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX1-NEXT:    vbroadcastss 8(%rsi), %xmm8
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 8(%rdi), %xmm8
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1]
-; X64-AVX1-NEXT:    vbroadcastss 4(%rsi), %xmm8
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 4(%rdi), %xmm8
-; X64-AVX1-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm5, %xmm9
-; X64-AVX1-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm9[0],xmm8[1],xmm9[1],xmm8[2],xmm9[2],xmm8[3],xmm9[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm9
-; X64-AVX1-NEXT:    vpsrld $16, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm5, %xmm0
-; X64-AVX1-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm0
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm9[0],xmm0[0],xmm9[1],xmm0[1],xmm9[2],xmm0[2],xmm9[3],xmm0[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm8[0],xmm0[1],xmm8[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm7[0]
-; X64-AVX1-NEXT:    vinsertf128 $1, %xmm6, %ymm0, %ymm0
-; X64-AVX1-NEXT:    vbroadcastss 60(%rsi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 60(%rdi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX1-NEXT:    vbroadcastss 56(%rsi), %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 56(%rdi), %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1]
-; X64-AVX1-NEXT:    vbroadcastss 52(%rsi), %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 52(%rdi), %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm3, %xmm7
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm4, %xmm7
-; X64-AVX1-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm7
-; X64-AVX1-NEXT:    vpsrld $16, %xmm3, %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm4, %xmm3
-; X64-AVX1-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm3
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm3 = xmm3[0],xmm6[0],xmm3[1],xmm6[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm3 = xmm3[0],xmm5[0]
-; X64-AVX1-NEXT:    vbroadcastss 44(%rsi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 44(%rdi), %xmm4
-; X64-AVX1-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm4
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm4[0],xmm5[0],xmm4[1],xmm5[1],xmm4[2],xmm5[2],xmm4[3],xmm5[3]
-; X64-AVX1-NEXT:    vbroadcastss 40(%rsi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 40(%rdi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1]
-; X64-AVX1-NEXT:    vbroadcastss 36(%rsi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vbroadcastss 36(%rdi), %xmm5
-; X64-AVX1-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm5
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm1, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrlq $48, %xmm2, %xmm6
-; X64-AVX1-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3]
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm6
-; X64-AVX1-NEXT:    vpsrld $16, %xmm1, %xmm1
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX1-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX1-NEXT:    vpsrld $16, %xmm2, %xmm1
-; X64-AVX1-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX1-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX1-NEXT:    orl %eax, %ecx
-; X64-AVX1-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX1-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm6[0],xmm1[0],xmm6[1],xmm1[1],xmm6[2],xmm1[2],xmm6[3],xmm1[3]
-; X64-AVX1-NEXT:    vpunpckldq {{.*#+}} xmm1 = xmm1[0],xmm5[0],xmm1[1],xmm5[1]
-; X64-AVX1-NEXT:    vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm4[0]
-; X64-AVX1-NEXT:    vinsertf128 $1, %xmm3, %ymm1, %ymm1
+; X64-AVX1-NEXT:    vbroadcastss {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-AVX1-NEXT:    vandnps (%rsi), %ymm1, %ymm0
+; X64-AVX1-NEXT:    vandps (%rdi), %ymm1, %ymm2
+; X64-AVX1-NEXT:    vorps %ymm0, %ymm2, %ymm0
+; X64-AVX1-NEXT:    vandnps 32(%rsi), %ymm1, %ymm2
+; X64-AVX1-NEXT:    vandps 32(%rdi), %ymm1, %ymm1
+; X64-AVX1-NEXT:    vorps %ymm2, %ymm1, %ymm1
 ; X64-AVX1-NEXT:    retq
 ;
 ; X64-AVX2-LABEL: fcopysign_v32f16:
 ; X64-AVX2:       # %bb.0:
-; X64-AVX2-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX2-NEXT:    vmovdqa 16(%rsi), %xmm6
-; X64-AVX2-NEXT:    vmovdqa 32(%rsi), %xmm1
-; X64-AVX2-NEXT:    vmovdqa 48(%rsi), %xmm3
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm2 = xmm6[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %eax
-; X64-AVX2-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    vmovdqa (%rdi), %xmm5
-; X64-AVX2-NEXT:    vmovdqa 16(%rdi), %xmm7
-; X64-AVX2-NEXT:    vmovdqa 32(%rdi), %xmm2
-; X64-AVX2-NEXT:    vmovdqa 48(%rdi), %xmm4
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm7[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX2-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX2-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX2-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm8, %ymm9, %ymm8
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX2-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX2-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm8 = ymm9[0],ymm8[0],ymm9[1],ymm8[1],ymm9[4],ymm8[4],ymm9[5],ymm8[5]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX2-NEXT:    vpsrld $16, %xmm6, %xmm10
-; X64-AVX2-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm7, %xmm10
-; X64-AVX2-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX2-NEXT:    vpsrld $16, %xmm0, %xmm11
-; X64-AVX2-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm5, %xmm11
-; X64-AVX2-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm6, %xmm6
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm7, %xmm6
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm5, %xmm0
-; X64-AVX2-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX2-NEXT:    movl 4(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 4(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm5[0],xmm0[0],xmm5[1],xmm0[1],xmm5[2],xmm0[2],xmm5[3],xmm0[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm6, %ymm0, %ymm0
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm0 = ymm9[0],ymm0[0],ymm9[1],ymm0[1],ymm9[4],ymm0[4],ymm9[5],ymm0[5]
-; X64-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm0 = ymm0[0],ymm8[0],ymm0[2],ymm8[2]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm5 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX2-NEXT:    movl 60(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 60(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    movl 44(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 44(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm6 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    movl 56(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 56(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrldq {{.*#+}} xmm7 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    movl 40(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 40(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm8
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm5 = ymm6[0],ymm5[0],ymm6[1],ymm5[1],ymm6[4],ymm5[4],ymm6[5],ymm5[5]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX2-NEXT:    vpsrld $16, %xmm3, %xmm7
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm4, %xmm7
-; X64-AVX2-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm6[0],xmm7[0],xmm6[1],xmm7[1],xmm6[2],xmm7[2],xmm6[3],xmm7[3]
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX2-NEXT:    vpsrld $16, %xmm1, %xmm8
-; X64-AVX2-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrld $16, %xmm2, %xmm8
-; X64-AVX2-NEXT:    vpextrw $0, %xmm8, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm8
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm3, %xmm3
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm4, %xmm3
-; X64-AVX2-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX2-NEXT:    movl 52(%rsi), %ecx
-; X64-AVX2-NEXT:    andl %eax, %ecx
-; X64-AVX2-NEXT:    movzwl 52(%rdi), %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm1, %xmm1
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %ecx
-; X64-AVX2-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX2-NEXT:    vpsrlq $48, %xmm2, %xmm1
-; X64-AVX2-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX2-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %ecx, %edx
-; X64-AVX2-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm1
-; X64-AVX2-NEXT:    andl 36(%rsi), %eax
-; X64-AVX2-NEXT:    movzwl 36(%rdi), %ecx
-; X64-AVX2-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX2-NEXT:    orl %eax, %ecx
-; X64-AVX2-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm2
-; X64-AVX2-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; X64-AVX2-NEXT:    vinserti128 $1, %xmm3, %ymm1, %ymm1
-; X64-AVX2-NEXT:    vpunpckldq {{.*#+}} ymm1 = ymm6[0],ymm1[0],ymm6[1],ymm1[1],ymm6[4],ymm1[4],ymm6[5],ymm1[5]
-; X64-AVX2-NEXT:    vpunpcklqdq {{.*#+}} ymm1 = ymm1[0],ymm5[0],ymm1[2],ymm5[2]
+; X64-AVX2-NEXT:    vpbroadcastw {{.*#+}} ymm1 = [NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN,NaN]
+; X64-AVX2-NEXT:    vpandn (%rsi), %ymm1, %ymm0
+; X64-AVX2-NEXT:    vpand (%rdi), %ymm1, %ymm2
+; X64-AVX2-NEXT:    vpor %ymm0, %ymm2, %ymm0
+; X64-AVX2-NEXT:    vpandn 32(%rsi), %ymm1, %ymm2
+; X64-AVX2-NEXT:    vpand 32(%rdi), %ymm1, %ymm1
+; X64-AVX2-NEXT:    vpor %ymm2, %ymm1, %ymm1
 ; X64-AVX2-NEXT:    retq
 ;
-; X64-AVX512VL-LABEL: fcopysign_v32f16:
-; X64-AVX512VL:       # %bb.0:
-; X64-AVX512VL-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX512VL-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX512VL-NEXT:    vmovdqa 32(%rsi), %xmm4
-; X64-AVX512VL-NEXT:    vmovdqa 48(%rsi), %xmm6
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm6[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VL-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VL-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX512VL-NEXT:    vmovdqa 32(%rdi), %xmm5
-; X64-AVX512VL-NEXT:    vmovdqa 48(%rdi), %xmm7
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm7[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX512VL-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VL-NEXT:    movl 60(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 60(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    movl 44(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 44(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm8, %ymm9, %ymm8
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm8, %zmm9, %zmm8
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    movl 56(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 56(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    movl 40(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 40(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm11 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrldq {{.*#+}} xmm11 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm12
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm8 = zmm9[0],zmm8[0],zmm9[1],zmm8[1],zmm9[4],zmm8[4],zmm9[5],zmm8[5],zmm9[8],zmm8[8],zmm9[9],zmm8[9],zmm9[12],zmm8[12],zmm9[13],zmm8[13]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm6, %xmm10
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm7, %xmm10
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm4, %xmm11
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm5, %xmm11
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm2, %xmm11
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm3, %xmm11
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm0, %xmm12
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm12, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrld $16, %xmm1, %xmm12
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm12, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm12
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm6, %xmm6
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm7, %xmm6
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VL-NEXT:    movl 52(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 52(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm4, %xmm4
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm5, %xmm4
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VL-NEXT:    movl 36(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 36(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm6, %ymm4, %ymm4
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm2
-; X64-AVX512VL-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX512VL-NEXT:    andl %eax, %ecx
-; X64-AVX512VL-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VL-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VL-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X64-AVX512VL-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VL-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %ecx, %edx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VL-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VL-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VL-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VL-NEXT:    orl %eax, %ecx
-; X64-AVX512VL-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VL-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VL-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VL-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; X64-AVX512VL-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm9[0],zmm0[0],zmm9[1],zmm0[1],zmm9[4],zmm0[4],zmm9[5],zmm0[5],zmm9[8],zmm0[8],zmm9[9],zmm0[9],zmm9[12],zmm0[12],zmm9[13],zmm0[13]
-; X64-AVX512VL-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm8[0],zmm0[2],zmm8[2],zmm0[4],zmm8[4],zmm0[6],zmm8[6]
-; X64-AVX512VL-NEXT:    retq
-;
-; X64-AVX512FP16-LABEL: fcopysign_v32f16:
-; X64-AVX512FP16:       # %bb.0:
-; X64-AVX512FP16-NEXT:    vmovdqu64 (%rdi), %zmm1
-; X64-AVX512FP16-NEXT:    vpbroadcastq {{.*#+}} zmm0 = [9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063]
-; X64-AVX512FP16-NEXT:    vpternlogq $202, (%rsi), %zmm1, %zmm0
-; X64-AVX512FP16-NEXT:    retq
-;
-; X64-AVX512VLDQ-LABEL: fcopysign_v32f16:
-; X64-AVX512VLDQ:       # %bb.0:
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rsi), %xmm0
-; X64-AVX512VLDQ-NEXT:    vmovdqa 16(%rsi), %xmm2
-; X64-AVX512VLDQ-NEXT:    vmovdqa 32(%rsi), %xmm4
-; X64-AVX512VLDQ-NEXT:    vmovdqa 48(%rsi), %xmm6
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm1 = xmm6[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %eax
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vmovdqa (%rdi), %xmm1
-; X64-AVX512VLDQ-NEXT:    vmovdqa 16(%rdi), %xmm3
-; X64-AVX512VLDQ-NEXT:    vmovdqa 32(%rdi), %xmm5
-; X64-AVX512VLDQ-NEXT:    vmovdqa 48(%rdi), %xmm7
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm8 = xmm7[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm8, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm8
-; X64-AVX512VLDQ-NEXT:    movl $-32768, %eax # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    movl 60(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 60(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm4[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm5[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    movl 44(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 44(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm8, %ymm9, %ymm8
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm2[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm3[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    movl 28(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 28(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm0[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm1[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    movl 12(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 12(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm8, %zmm9, %zmm8
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm6[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm9 = xmm7[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm9, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    movl 56(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 56(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm4[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm5[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    movl 40(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 40(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm2[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm10 = xmm3[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    movl 24(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 24(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm11 = xmm0[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrldq {{.*#+}} xmm11 = xmm1[10,11,12,13,14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    movl 8(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 8(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm12
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm8 = zmm9[0],zmm8[0],zmm9[1],zmm8[1],zmm9[4],zmm8[4],zmm9[5],zmm8[5],zmm9[8],zmm8[8],zmm9[9],zmm8[9],zmm9[12],zmm8[12],zmm9[13],zmm8[13]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm7, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm9
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm6, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm7, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm10, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm9[0],xmm10[0],xmm9[1],xmm10[1],xmm9[2],xmm10[2],xmm9[3],xmm10[3]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm5, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm4, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm5, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm3, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm10
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm2, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm3, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm11, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm10[0],xmm11[0],xmm10[1],xmm11[1],xmm10[2],xmm11[2],xmm10[3],xmm11[3]
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm1, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm11
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm0, %xmm12
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm12, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrld $16, %xmm1, %xmm12
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm12, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm12
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm6, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm7, %xmm6
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm6, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm6
-; X64-AVX512VLDQ-NEXT:    movl 52(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 52(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm7
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm4, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm5, %xmm4
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm4, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm4
-; X64-AVX512VLDQ-NEXT:    movl 36(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 36(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm5
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm6, %ymm4, %ymm4
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm2, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm3, %xmm2
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm2, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm2
-; X64-AVX512VLDQ-NEXT:    movl 20(%rsi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    movzwl 20(%rdi), %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm3
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %ecx
-; X64-AVX512VLDQ-NEXT:    andl $-32768, %ecx # imm = 0x8000
-; X64-AVX512VLDQ-NEXT:    vpsrlq $48, %xmm1, %xmm0
-; X64-AVX512VLDQ-NEXT:    vpextrw $0, %xmm0, %edx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %edx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %ecx, %edx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %edx, %xmm0, %xmm0
-; X64-AVX512VLDQ-NEXT:    andl 4(%rsi), %eax
-; X64-AVX512VLDQ-NEXT:    movzwl 4(%rdi), %ecx
-; X64-AVX512VLDQ-NEXT:    andl $32767, %ecx # imm = 0x7FFF
-; X64-AVX512VLDQ-NEXT:    orl %eax, %ecx
-; X64-AVX512VLDQ-NEXT:    vpinsrw $0, %ecx, %xmm0, %xmm1
-; X64-AVX512VLDQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; X64-AVX512VLDQ-NEXT:    vinserti128 $1, %xmm2, %ymm0, %ymm0
-; X64-AVX512VLDQ-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; X64-AVX512VLDQ-NEXT:    vpunpckldq {{.*#+}} zmm0 = zmm9[0],zmm0[0],zmm9[1],zmm0[1],zmm9[4],zmm0[4],zmm9[5],zmm0[5],zmm9[8],zmm0[8],zmm9[9],zmm0[9],zmm9[12],zmm0[12],zmm9[13],zmm0[13]
-; X64-AVX512VLDQ-NEXT:    vpunpcklqdq {{.*#+}} zmm0 = zmm0[0],zmm8[0],zmm0[2],zmm8[2],zmm0[4],zmm8[4],zmm0[6],zmm8[6]
-; X64-AVX512VLDQ-NEXT:    retq
+; X64-AVX512-LABEL: fcopysign_v32f16:
+; X64-AVX512:       # %bb.0:
+; X64-AVX512-NEXT:    vmovdqu64 (%rdi), %zmm1
+; X64-AVX512-NEXT:    vpbroadcastq {{.*#+}} zmm0 = [9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063,9223231297218904063]
+; X64-AVX512-NEXT:    vpternlogq $202, (%rsi), %zmm1, %zmm0
+; X64-AVX512-NEXT:    retq
   %a0 = load <32 x half>, ptr %p0, align 16
   %a1 = load <32 x half>, ptr %p1, align 16
   %t = call <32 x half> @llvm.copysign.v32f16(<32 x half> %a0, <32 x half> %a1)
@@ -5717,5 +781,8 @@ declare <32 x half> @llvm.copysign.v32f16(<32 x half>, <32 x half>)
 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
 ; X64: {{.*}}
 ; X64-AVX: {{.*}}
+; X64-AVX512FP16: {{.*}}
+; X64-AVX512VL: {{.*}}
+; X64-AVX512VLDQ: {{.*}}
 ; X86: {{.*}}
 ; X86-AVX: {{.*}}
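
For reference (this note is editorial, not part of the commit): the new
X64-AVX512 output above implements copysign with a single bitwise select.
The broadcast constant 9223231297218904063 is 0x7FFF7FFF7FFF7FFF, i.e. the
per-f16-lane magnitude mask, and vpternlogq's immediate 202 (0xCA) is the
truth table for "take the first source where the mask bit is 1, the second
source where it is 0". A minimal C++ sketch of the same bit-select,
modelling one 64-bit lane and one half lane (names below are illustrative,
not from the commit):

    #include <cstdint>

    // Bitwise select, the operation vpternlogq encodes with imm8 = 0xCA:
    // each result bit comes from `a` where `mask` is 1, else from `b`.
    uint64_t bit_select(uint64_t mask, uint64_t a, uint64_t b) {
      return (mask & a) | (~mask & b);
    }

    // Scalar model of copysign on one IEEE-754 half: keep the magnitude
    // (exponent + mantissa) bits of `mag`, take the sign bit of `sgn`.
    uint16_t copysign_f16_bits(uint16_t mag, uint16_t sgn) {
      return uint16_t(bit_select(0x7FFF, mag, sgn));
    }

This is the same value the removed scalarized paths computed one element at
a time with pextrw/andl/orl/vpinsrw; folding all 32 lanes into one masked
vpternlogq with a memory operand is where the instruction-count win comes
from.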

More information about the llvm-commits mailing list