[llvm] r319059 - [X86] Make getSetCCResultType return vXi1 for any vXi32/vXi64 vector over 512 bits long when AVX512 is enabled.
Craig Topper via llvm-commits
llvm-commits at lists.llvm.org
Mon Nov 27 09:51:56 PST 2017
Author: ctopper
Date: Mon Nov 27 09:51:55 2017
New Revision: 319059
URL: http://llvm.org/viewvc/llvm-project?rev=319059&view=rev
Log:
[X86] Make getSetCCResultType return vXi1 for any vXi32/vXi64 vector over 512 bits long when AVX512 is enabled.
Do the same for vXi16/vXi8 when BWI is available.
Any vector wider than 512 bits will be split down to 512 bits during legalization. But without this change we would fold sexts into those compares before the split happens, making the pattern difficult to recover later and leading to scalarization.
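As a minimal illustration (a hypothetical reduction of the tests updated below, not a new test in this patch), a compare of 1024-bit vectors whose result feeds a sext now keeps the <16 x i1> setcc type until the operands are split into two 512-bit halves, so each half can lower to a mask-register compare:

define <16 x i8> @cmp_sext_v16i64(<16 x i64> %a0, <16 x i64> %a1) {
  %c = icmp sgt <16 x i64> %a0, %a1    ; setcc result type is now <16 x i1>
  %s = sext <16 x i1> %c to <16 x i8>  ; no longer folded into the compare early
  ret <16 x i8> %s
}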
Modified:
llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
llvm/trunk/test/CodeGen/X86/vector-compare-results.ll
llvm/trunk/test/CodeGen/X86/vselect-packss.ll
Modified: llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86ISelLowering.cpp?rev=319059&r1=319058&r2=319059&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86ISelLowering.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86ISelLowering.cpp Mon Nov 27 09:51:55 2017
@@ -1722,7 +1722,7 @@ EVT X86TargetLowering::getSetCCResultTyp
MVT VVT = VT.getSimpleVT();
const unsigned NumElts = VVT.getVectorNumElements();
MVT EltVT = VVT.getVectorElementType();
- if (VVT.is512BitVector()) {
+ if (VVT.getSizeInBits() >= 512) {
if (Subtarget.hasAVX512())
if (EltVT == MVT::i32 || EltVT == MVT::i64 ||
EltVT == MVT::f32 || EltVT == MVT::f64)
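For context, the changed comparison sits at the top of X86TargetLowering::getSetCCResultType. A rough sketch of the enclosing logic after this patch (reconstructed from the diff and the AVX512/BWI checks it describes, not quoted verbatim from the tree):

EVT X86TargetLowering::getSetCCResultType(const DataLayout &DL,
                                          LLVMContext &Context,
                                          EVT VT) const {
  if (!VT.isVector())
    return MVT::i8;
  if (VT.isSimple()) {
    MVT VVT = VT.getSimpleVT();
    const unsigned NumElts = VVT.getVectorNumElements();
    MVT EltVT = VVT.getVectorElementType();
    // Was: if (VVT.is512BitVector()) -- now any 512-bit-or-wider vector
    // returns a vXi1 result so the split halves can use mask registers.
    if (VVT.getSizeInBits() >= 512) {
      if (Subtarget.hasAVX512())
        if (EltVT == MVT::i32 || EltVT == MVT::i64 ||
            EltVT == MVT::f32 || EltVT == MVT::f64)
          return EVT::getVectorVT(Context, MVT::i1, NumElts);
      if (Subtarget.hasBWI())
        if (EltVT == MVT::i8 || EltVT == MVT::i16)
          return EVT::getVectorVT(Context, MVT::i1, NumElts);
    }
  }
  return VT.changeVectorElementTypeToInteger();
}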
Modified: llvm/trunk/test/CodeGen/X86/vector-compare-results.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/vector-compare-results.ll?rev=319059&r1=319058&r2=319059&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/vector-compare-results.ll (original)
+++ llvm/trunk/test/CodeGen/X86/vector-compare-results.ll Mon Nov 27 09:51:55 2017
@@ -3237,137 +3237,35 @@ define <16 x i1> @test_cmp_v16i64(<16 x
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512-LABEL: test_cmp_v16i64:
-; AVX512: # BB#0:
-; AVX512-NEXT: vextracti32x4 $3, %zmm2, %xmm4
-; AVX512-NEXT: vpextrq $1, %xmm4, %rcx
-; AVX512-NEXT: vextracti32x4 $3, %zmm0, %xmm5
-; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512-NEXT: xorl %eax, %eax
-; AVX512-NEXT: cmpq %rcx, %rdx
-; AVX512-NEXT: movq $-1, %rcx
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm6
-; AVX512-NEXT: vmovq %xmm4, %rdx
-; AVX512-NEXT: vmovq %xmm5, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512-NEXT: vextracti32x4 $2, %zmm2, %xmm5
-; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm7
-; AVX512-NEXT: vmovq %xmm5, %rdx
-; AVX512-NEXT: vmovq %xmm6, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512-NEXT: vextracti128 $1, %ymm2, %xmm5
-; AVX512-NEXT: vpextrq $1, %xmm5, %rdx
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm6
-; AVX512-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm7
-; AVX512-NEXT: vmovq %xmm5, %rdx
-; AVX512-NEXT: vmovq %xmm6, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm5 = xmm5[0],xmm7[0]
-; AVX512-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm6
-; AVX512-NEXT: vmovq %xmm2, %rdx
-; AVX512-NEXT: vmovq %xmm0, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm0
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm6[0]
-; AVX512-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512-NEXT: vextracti32x4 $3, %zmm1, %xmm4
-; AVX512-NEXT: vpextrq $1, %xmm4, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vmovq %xmm2, %rdx
-; AVX512-NEXT: vmovq %xmm4, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm2
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm5[0]
-; AVX512-NEXT: vextracti32x4 $2, %zmm3, %xmm4
-; AVX512-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512-NEXT: vextracti32x4 $2, %zmm1, %xmm5
-; AVX512-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm6
-; AVX512-NEXT: vmovq %xmm4, %rdx
-; AVX512-NEXT: vmovq %xmm5, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512-NEXT: vextracti128 $1, %ymm3, %xmm4
-; AVX512-NEXT: vpextrq $1, %xmm4, %rdx
-; AVX512-NEXT: vextracti128 $1, %ymm1, %xmm5
-; AVX512-NEXT: vpextrq $1, %xmm5, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm6
-; AVX512-NEXT: vmovq %xmm4, %rdx
-; AVX512-NEXT: vmovq %xmm5, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm4
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm4 = xmm4[0],xmm6[0]
-; AVX512-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: movl $0, %edx
-; AVX512-NEXT: cmovgq %rcx, %rdx
-; AVX512-NEXT: vmovq %rdx, %xmm5
-; AVX512-NEXT: vmovq %xmm3, %rdx
-; AVX512-NEXT: vmovq %xmm1, %rsi
-; AVX512-NEXT: cmpq %rdx, %rsi
-; AVX512-NEXT: cmovgq %rcx, %rax
-; AVX512-NEXT: vmovq %rax, %xmm1
-; AVX512-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm5[0]
-; AVX512-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512-NEXT: vpmovqd %zmm1, %ymm1
-; AVX512-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512-NEXT: vzeroupper
-; AVX512-NEXT: retq
+; AVX512F-LABEL: test_cmp_v16i64:
+; AVX512F: # BB#0:
+; AVX512F-NEXT: vpcmpgtq %zmm2, %zmm0, %k0
+; AVX512F-NEXT: vpcmpgtq %zmm3, %zmm1, %k1
+; AVX512F-NEXT: kunpckbw %k0, %k1, %k1
+; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
+; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512F-NEXT: vzeroupper
+; AVX512F-NEXT: retq
+;
+; AVX512DQ-LABEL: test_cmp_v16i64:
+; AVX512DQ: # BB#0:
+; AVX512DQ-NEXT: vpcmpgtq %zmm2, %zmm0, %k0
+; AVX512DQ-NEXT: vpcmpgtq %zmm3, %zmm1, %k1
+; AVX512DQ-NEXT: kunpckbw %k0, %k1, %k0
+; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
+; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512DQ-NEXT: vzeroupper
+; AVX512DQ-NEXT: retq
+;
+; AVX512BW-LABEL: test_cmp_v16i64:
+; AVX512BW: # BB#0:
+; AVX512BW-NEXT: vpcmpgtq %zmm2, %zmm0, %k0
+; AVX512BW-NEXT: vpcmpgtq %zmm3, %zmm1, %k1
+; AVX512BW-NEXT: kunpckbw %k0, %k1, %k0
+; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
+; AVX512BW-NEXT: # kill: %XMM0<def> %XMM0<kill> %ZMM0<kill>
+; AVX512BW-NEXT: vzeroupper
+; AVX512BW-NEXT: retq
%1 = icmp sgt <16 x i64> %a0, %a1
ret <16 x i1> %1
}
@@ -5713,412 +5611,10 @@ define <64 x i1> @test_cmp_v64i16(<64 x
;
; AVX512BW-LABEL: test_cmp_v64i16:
; AVX512BW: # BB#0:
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm2, %xmm4
-; AVX512BW-NEXT: vpextrw $1, %xmm4, %ecx
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm0, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
-; AVX512BW-NEXT: xorl %eax, %eax
-; AVX512BW-NEXT: cmpw %cx, %dx
-; AVX512BW-NEXT: movl $65535, %ecx # imm = 0xFFFF
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm4, %esi
-; AVX512BW-NEXT: vmovd %xmm5, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm6
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm2, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512BW-NEXT: vpextrw $1, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm5, %esi
-; AVX512BW-NEXT: vmovd %xmm6, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm7
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $2, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $3, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $4, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $5, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $6, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $7, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm7, %xmm5
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm5, %ymm4
-; AVX512BW-NEXT: vextracti128 $1, %ymm2, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm5, %edx
-; AVX512BW-NEXT: vextracti128 $1, %ymm0, %xmm6
-; AVX512BW-NEXT: vpextrw $1, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm5, %esi
-; AVX512BW-NEXT: vmovd %xmm6, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm7
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $2, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $3, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $4, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $5, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $6, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm7, %xmm7
-; AVX512BW-NEXT: vpextrw $7, %xmm5, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm6, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm7, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $1, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm2, %esi
-; AVX512BW-NEXT: vmovd %xmm0, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm6
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $2, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $3, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $4, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $5, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $6, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $7, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm0, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm0
-; AVX512BW-NEXT: vinserti128 $1, %xmm5, %ymm0, %ymm0
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512BW-NEXT: vpmovwb %zmm0, %ymm0
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512BW-NEXT: vpextrw $1, %xmm2, %edx
-; AVX512BW-NEXT: vextracti32x4 $3, %zmm1, %xmm4
-; AVX512BW-NEXT: vpextrw $1, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm2, %esi
-; AVX512BW-NEXT: vmovd %xmm4, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm5
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $2, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $3, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $4, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $5, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $6, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $7, %xmm2, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm4, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm5, %xmm2
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm3, %xmm4
-; AVX512BW-NEXT: vpextrw $1, %xmm4, %edx
-; AVX512BW-NEXT: vextracti32x4 $2, %zmm1, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm4, %esi
-; AVX512BW-NEXT: vmovd %xmm5, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm6
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
-; AVX512BW-NEXT: vinserti128 $1, %xmm2, %ymm4, %ymm2
-; AVX512BW-NEXT: vextracti128 $1, %ymm3, %xmm4
-; AVX512BW-NEXT: vpextrw $1, %xmm4, %edx
-; AVX512BW-NEXT: vextracti128 $1, %ymm1, %xmm5
-; AVX512BW-NEXT: vpextrw $1, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm4, %esi
-; AVX512BW-NEXT: vmovd %xmm5, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm6
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $2, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $3, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $4, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $5, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $6, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm6, %xmm6
-; AVX512BW-NEXT: vpextrw $7, %xmm4, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm5, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $7, %edx, %xmm6, %xmm4
-; AVX512BW-NEXT: vpextrw $1, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $1, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vmovd %xmm3, %esi
-; AVX512BW-NEXT: vmovd %xmm1, %edi
-; AVX512BW-NEXT: cmpw %si, %di
-; AVX512BW-NEXT: movl $0, %esi
-; AVX512BW-NEXT: cmovgl %ecx, %esi
-; AVX512BW-NEXT: vmovd %esi, %xmm5
-; AVX512BW-NEXT: vpinsrw $1, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $2, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $2, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $2, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $3, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $3, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $3, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $4, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $4, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $4, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $5, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $5, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $5, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $6, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $6, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: movl $0, %edx
-; AVX512BW-NEXT: cmovgl %ecx, %edx
-; AVX512BW-NEXT: vpinsrw $6, %edx, %xmm5, %xmm5
-; AVX512BW-NEXT: vpextrw $7, %xmm3, %edx
-; AVX512BW-NEXT: vpextrw $7, %xmm1, %esi
-; AVX512BW-NEXT: cmpw %dx, %si
-; AVX512BW-NEXT: cmovgl %ecx, %eax
-; AVX512BW-NEXT: vpinsrw $7, %eax, %xmm5, %xmm1
-; AVX512BW-NEXT: vinserti128 $1, %xmm4, %ymm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512BW-NEXT: vpmovwb %zmm1, %ymm1
-; AVX512BW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpcmpgtw %zmm2, %zmm0, %k0
+; AVX512BW-NEXT: vpcmpgtw %zmm3, %zmm1, %k1
+; AVX512BW-NEXT: kunpckdq %k0, %k1, %k0
+; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
; AVX512BW-NEXT: retq
%1 = icmp sgt <64 x i16> %a0, %a1
ret <64 x i1> %1
Modified: llvm/trunk/test/CodeGen/X86/vselect-packss.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/vselect-packss.ll?rev=319059&r1=319058&r2=319059&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/vselect-packss.ll (original)
+++ llvm/trunk/test/CodeGen/X86/vselect-packss.ll Mon Nov 27 09:51:55 2017
@@ -260,271 +260,16 @@ define <16 x i8> @vselect_packss_v16i64(
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
-; AVX512F-LABEL: vselect_packss_v16i64:
-; AVX512F: # BB#0:
-; AVX512F-NEXT: vextracti32x4 $3, %zmm2, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rcx
-; AVX512F-NEXT: vextracti32x4 $3, %zmm0, %xmm7
-; AVX512F-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512F-NEXT: xorl %eax, %eax
-; AVX512F-NEXT: cmpq %rcx, %rdx
-; AVX512F-NEXT: movq $-1, %rcx
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm8
-; AVX512F-NEXT: vmovq %xmm6, %rdx
-; AVX512F-NEXT: vmovq %xmm7, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm6[0],xmm8[0]
-; AVX512F-NEXT: vextracti32x4 $2, %zmm2, %xmm7
-; AVX512F-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512F-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm9
-; AVX512F-NEXT: vmovq %xmm7, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm9[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm8, %ymm6, %ymm8
-; AVX512F-NEXT: vextracti128 $1, %ymm2, %xmm7
-; AVX512F-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512F-NEXT: vextracti128 $1, %ymm0, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm9
-; AVX512F-NEXT: vmovq %xmm7, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm9[0]
-; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512F-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vmovq %xmm2, %rdx
-; AVX512F-NEXT: vmovq %xmm0, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm0
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm7[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm0
-; AVX512F-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm8
-; AVX512F-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512F-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512F-NEXT: vextracti32x4 $3, %zmm1, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vmovq %xmm2, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm2
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm7[0]
-; AVX512F-NEXT: vextracti32x4 $2, %zmm3, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rdx
-; AVX512F-NEXT: vextracti32x4 $2, %zmm1, %xmm7
-; AVX512F-NEXT: vpextrq $1, %xmm7, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm0
-; AVX512F-NEXT: vmovq %xmm6, %rdx
-; AVX512F-NEXT: vmovq %xmm7, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm2
-; AVX512F-NEXT: vextracti128 $1, %ymm3, %xmm0
-; AVX512F-NEXT: vpextrq $1, %xmm0, %rdx
-; AVX512F-NEXT: vextracti128 $1, %ymm1, %xmm6
-; AVX512F-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm7
-; AVX512F-NEXT: vmovq %xmm0, %rdx
-; AVX512F-NEXT: vmovq %xmm6, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm0
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm7[0]
-; AVX512F-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512F-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: movl $0, %edx
-; AVX512F-NEXT: cmoveq %rcx, %rdx
-; AVX512F-NEXT: vmovq %rdx, %xmm6
-; AVX512F-NEXT: vmovq %xmm3, %rdx
-; AVX512F-NEXT: vmovq %xmm1, %rsi
-; AVX512F-NEXT: cmpq %rdx, %rsi
-; AVX512F-NEXT: cmoveq %rcx, %rax
-; AVX512F-NEXT: vmovq %rax, %xmm1
-; AVX512F-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm6[0]
-; AVX512F-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
-; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
-; AVX512F-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
-; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpblendvb %xmm0, %xmm4, %xmm5, %xmm0
-; AVX512F-NEXT: vzeroupper
-; AVX512F-NEXT: retq
-;
-; AVX512VL-LABEL: vselect_packss_v16i64:
-; AVX512VL: # BB#0:
-; AVX512VL-NEXT: vextracti32x4 $3, %zmm2, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rcx
-; AVX512VL-NEXT: vextracti32x4 $3, %zmm0, %xmm7
-; AVX512VL-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512VL-NEXT: xorl %eax, %eax
-; AVX512VL-NEXT: cmpq %rcx, %rdx
-; AVX512VL-NEXT: movq $-1, %rcx
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm8
-; AVX512VL-NEXT: vmovq %xmm6, %rdx
-; AVX512VL-NEXT: vmovq %xmm7, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm6
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm8 = xmm6[0],xmm8[0]
-; AVX512VL-NEXT: vextracti32x4 $2, %zmm2, %xmm7
-; AVX512VL-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512VL-NEXT: vextracti32x4 $2, %zmm0, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm9
-; AVX512VL-NEXT: vmovq %xmm7, %rdx
-; AVX512VL-NEXT: vmovq %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm6
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm9[0]
-; AVX512VL-NEXT: vinserti128 $1, %xmm8, %ymm6, %ymm8
-; AVX512VL-NEXT: vextracti128 $1, %ymm2, %xmm7
-; AVX512VL-NEXT: vpextrq $1, %xmm7, %rdx
-; AVX512VL-NEXT: vextracti128 $1, %ymm0, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm9
-; AVX512VL-NEXT: vmovq %xmm7, %rdx
-; AVX512VL-NEXT: vmovq %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm6
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm6 = xmm6[0],xmm9[0]
-; AVX512VL-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512VL-NEXT: vpextrq $1, %xmm0, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm7
-; AVX512VL-NEXT: vmovq %xmm2, %rdx
-; AVX512VL-NEXT: vmovq %xmm0, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm0
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm7[0]
-; AVX512VL-NEXT: vinserti128 $1, %xmm6, %ymm0, %ymm0
-; AVX512VL-NEXT: vinserti64x4 $1, %ymm8, %zmm0, %zmm0
-; AVX512VL-NEXT: vpmovqd %zmm0, %ymm8
-; AVX512VL-NEXT: vextracti32x4 $3, %zmm3, %xmm2
-; AVX512VL-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512VL-NEXT: vextracti32x4 $3, %zmm1, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm7
-; AVX512VL-NEXT: vmovq %xmm2, %rdx
-; AVX512VL-NEXT: vmovq %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm2
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm7[0]
-; AVX512VL-NEXT: vextracti32x4 $2, %zmm3, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rdx
-; AVX512VL-NEXT: vextracti32x4 $2, %zmm1, %xmm7
-; AVX512VL-NEXT: vpextrq $1, %xmm7, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm0
-; AVX512VL-NEXT: vmovq %xmm6, %rdx
-; AVX512VL-NEXT: vmovq %xmm7, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm6
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm0 = xmm6[0],xmm0[0]
-; AVX512VL-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
-; AVX512VL-NEXT: vextracti128 $1, %ymm3, %xmm2
-; AVX512VL-NEXT: vpextrq $1, %xmm2, %rdx
-; AVX512VL-NEXT: vextracti128 $1, %ymm1, %xmm6
-; AVX512VL-NEXT: vpextrq $1, %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm7
-; AVX512VL-NEXT: vmovq %xmm2, %rdx
-; AVX512VL-NEXT: vmovq %xmm6, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm2
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm2 = xmm2[0],xmm7[0]
-; AVX512VL-NEXT: vpextrq $1, %xmm3, %rdx
-; AVX512VL-NEXT: vpextrq $1, %xmm1, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: movl $0, %edx
-; AVX512VL-NEXT: cmoveq %rcx, %rdx
-; AVX512VL-NEXT: vmovq %rdx, %xmm6
-; AVX512VL-NEXT: vmovq %xmm3, %rdx
-; AVX512VL-NEXT: vmovq %xmm1, %rsi
-; AVX512VL-NEXT: cmpq %rdx, %rsi
-; AVX512VL-NEXT: cmoveq %rcx, %rax
-; AVX512VL-NEXT: vmovq %rax, %xmm1
-; AVX512VL-NEXT: vpunpcklqdq {{.*#+}} xmm1 = xmm1[0],xmm6[0]
-; AVX512VL-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512VL-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512VL-NEXT: vpmovqd %zmm0, %ymm0
-; AVX512VL-NEXT: vinserti64x4 $1, %ymm0, %zmm8, %zmm0
-; AVX512VL-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512VL-NEXT: vpblendvb %xmm0, %xmm4, %xmm5, %xmm0
-; AVX512VL-NEXT: vzeroupper
-; AVX512VL-NEXT: retq
+; AVX512-LABEL: vselect_packss_v16i64:
+; AVX512: # BB#0:
+; AVX512-NEXT: vpcmpeqq %zmm2, %zmm0, %k0
+; AVX512-NEXT: vpcmpeqq %zmm3, %zmm1, %k1
+; AVX512-NEXT: kunpckbw %k0, %k1, %k1
+; AVX512-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
+; AVX512-NEXT: vpmovdb %zmm0, %xmm0
+; AVX512-NEXT: vpblendvb %xmm0, %xmm4, %xmm5, %xmm0
+; AVX512-NEXT: vzeroupper
+; AVX512-NEXT: retq
%1 = icmp eq <16 x i64> %a0, %a1
%2 = sext <16 x i1> %1 to <16 x i8>
%3 = and <16 x i8> %2, %a2