[llvm] r345293 - [X86] Add KNL command lines to movmsk-cmp.ll.
Craig Topper via llvm-commits
llvm-commits at lists.llvm.org
Thu Oct 25 11:06:25 PDT 2018
Author: ctopper
Date: Thu Oct 25 11:06:25 2018
New Revision: 345293
URL: http://llvm.org/viewvc/llvm-project?rev=345293&view=rev
Log:
[X86] Add KNL command lines to movmsk-cmp.ll.
Some of this generated code looks pretty bad, and we should probably still be using movmskb more when only avx512f is available.
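For reference, the movmskb form being suggested is roughly what the AVX configurations in this test already produce. For allones_v16i8_sign that is a short sketch along these lines (illustrative only; exact compare form and register choices may differ):

    vpmovmskb %xmm0, %eax
    cmpw      $-1, %ax
    sete      %al
    retq

compared with the vpcmpgtb + vpmovsxbd + vptestmd + kortestw sequence the avx512f-only (KNL) configuration currently emits in the hunks below.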
Modified:
llvm/trunk/test/CodeGen/X86/movmsk-cmp.ll
Modified: llvm/trunk/test/CodeGen/X86/movmsk-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/movmsk-cmp.ll?rev=345293&r1=345292&r2=345293&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/movmsk-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/movmsk-cmp.ll Thu Oct 25 11:06:25 2018
@@ -2,6 +2,7 @@
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefix=SSE2
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=AVX --check-prefix=AVX1
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx2 | FileCheck %s --check-prefix=AVX --check-prefix=AVX2
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefix=KNL
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512vl,+avx512dq,+avx512bw | FileCheck %s --check-prefix=SKX
define i1 @allones_v16i8_sign(<16 x i8> %arg) {
@@ -19,6 +20,17 @@ define i1 @allones_v16i8_sign(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v16i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %xmm0, %k0
@@ -46,6 +58,17 @@ define i1 @allzeros_v16i8_sign(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %xmm0, %k0
@@ -92,6 +115,24 @@ define i1 @allones_v32i8_sign(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %ymm0, %k0
@@ -137,6 +178,23 @@ define i1 @allzeros_v32i8_sign(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %ymm0, %k0
@@ -202,6 +260,36 @@ define i1 @allones_v64i8_sign(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v64i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm2, %xmm2, %xmm2
+; KNL-NEXT: vpcmpgtb %ymm0, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: cmpq $-1, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v64i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %zmm0, %k0
@@ -264,6 +352,35 @@ define i1 @allzeros_v64i8_sign(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v64i8_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm2, %xmm2, %xmm2
+; KNL-NEXT: vpcmpgtb %ymm0, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v64i8_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %zmm0, %k0
@@ -298,6 +415,18 @@ define i1 @allones_v8i16_sign(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v8i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %xmm0, %k0
@@ -331,6 +460,18 @@ define i1 @allzeros_v8i16_sign(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %xmm0, %k0
@@ -381,6 +522,17 @@ define i1 @allones_v16i16_sign(<16 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %ymm0, %k0
@@ -432,6 +584,17 @@ define i1 @allzeros_v16i16_sign(<16 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %ymm0, %k0
@@ -499,6 +662,24 @@ define i1 @allones_v32i16_sign(<32 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm2, %xmm2, %xmm2
+; KNL-NEXT: vpcmpgtw %ymm0, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpgtw %ymm1, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %zmm0, %k0
@@ -564,6 +745,23 @@ define i1 @allzeros_v32i16_sign(<32 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i16_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm2, %xmm2, %xmm2
+; KNL-NEXT: vpcmpgtw %ymm0, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpgtw %ymm1, %ymm2, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i16_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovw2m %zmm0, %k0
@@ -592,6 +790,18 @@ define i1 @allones_v4i32_sign(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v4i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %xmm0, %k0
@@ -621,6 +831,17 @@ define i1 @allzeros_v4i32_sign(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %xmm0, %k0
@@ -656,6 +877,17 @@ define i1 @allones_v8i32_sign(<8 x i32>
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v8i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %ymm0, %k0
@@ -691,6 +923,17 @@ define i1 @allzeros_v8i32_sign(<8 x i32>
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %ymm0, %k0
@@ -756,6 +999,15 @@ define i1 @allones_v16i32_sign(<16 x i32
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %zmm0, %k0
@@ -821,6 +1073,15 @@ define i1 @allzeros_v16i32_sign(<16 x i3
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i32_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i32_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %zmm0, %k0
@@ -870,6 +1131,18 @@ define i1 @allones_v4i64_sign(<4 x i64>
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v4i64_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i64_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %ymm0, %k0
@@ -921,6 +1194,17 @@ define i1 @allzeros_v4i64_sign(<4 x i64>
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i64_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i64_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %ymm0, %k0
@@ -1015,6 +1299,16 @@ define i1 @allones_v8i64_sign(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v8i64_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i64_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %zmm0, %k0
@@ -1108,6 +1402,16 @@ define i1 @allzeros_v8i64_sign(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i64_sign:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i64_sign:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %zmm0, %k0
@@ -1138,6 +1442,18 @@ define i1 @allones_v16i8_and1(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v16i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %xmm0, %k0
@@ -1168,6 +1484,18 @@ define i1 @allzeros_v16i8_and1(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %xmm0, %k0
@@ -1222,6 +1550,25 @@ define i1 @allones_v32i8_and1(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %ymm0, %k0
@@ -1275,6 +1622,24 @@ define i1 @allzeros_v32i8_and1(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %ymm0, %k0
@@ -1355,6 +1720,38 @@ define i1 @allones_v64i8_and1(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v64i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: cmpq $-1, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v64i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %zmm0, %k0
@@ -1432,6 +1829,37 @@ define i1 @allzeros_v64i8_and1(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v64i8_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v64i8_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %zmm0, %k0
@@ -1469,6 +1897,19 @@ define i1 @allones_v8i16_and1(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v8i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %xmm0, %k0
@@ -1505,6 +1946,19 @@ define i1 @allzeros_v8i16_and1(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %xmm0, %k0
@@ -1562,6 +2016,18 @@ define i1 @allones_v16i16_and1(<16 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %ymm0, %k0
@@ -1641,6 +2107,26 @@ define i1 @allones_v32i16_and1(<32 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %zmm0, %k0
@@ -1718,6 +2204,25 @@ define i1 @allzeros_v32i16_and1(<32 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %zmm0, %k0
@@ -1776,6 +2281,18 @@ define i1 @allzeros_v16i16_and1(<16 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i16_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i16_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %ymm0, %k0
@@ -1807,6 +2324,18 @@ define i1 @allones_v4i32_and1(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v4i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
@@ -1840,6 +2369,17 @@ define i1 @allzeros_v4i32_and1(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1]
@@ -1891,6 +2431,17 @@ define i1 @allones_v8i32_and1(<8 x i32>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v8i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1]
@@ -1942,6 +2493,17 @@ define i1 @allzeros_v8i32_and1(<8 x i32>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1]
@@ -2020,6 +2582,15 @@ define i1 @allones_v16i32_and1(<16 x i32
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vpbroadcastd {{.*#+}} zmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
@@ -2098,6 +2669,15 @@ define i1 @allzeros_v16i32_and1(<16 x i3
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i32_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vpbroadcastd {{.*#+}} zmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i32_and1:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1]
@@ -2130,6 +2710,18 @@ define i1 @allones_v2i64_and1(<2 x i64>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v2i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $3, %al
+; KNL-NEXT: cmpb $3, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v2i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip), %xmm0, %k0
@@ -2162,6 +2754,17 @@ define i1 @allzeros_v2i64_and1(<2 x i64>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v2i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [1,1]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $3, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v2i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip), %xmm0, %k0
@@ -2215,6 +2818,18 @@ define i1 @allones_v4i64_and1(<4 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v4i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [1,1,1,1]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to4}, %ymm0, %k0
@@ -2270,6 +2885,17 @@ define i1 @allzeros_v4i64_and1(<4 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [1,1,1,1]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to4}, %ymm0, %k0
@@ -2355,6 +2981,15 @@ define i1 @allones_v8i64_and1(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v8i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
@@ -2439,6 +3074,15 @@ define i1 @allzeros_v8i64_and1(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i64_and1:
+; KNL: # %bb.0:
+; KNL-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i64_and1:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
@@ -2470,6 +3114,18 @@ define i1 @allones_v16i8_and4(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v16i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %xmm0, %k0
@@ -2500,6 +3156,18 @@ define i1 @allzeros_v16i8_and4(<16 x i8>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %xmm0, %k0
@@ -2554,6 +3222,25 @@ define i1 @allones_v32i8_and4(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %ymm0, %k0
@@ -2607,6 +3294,24 @@ define i1 @allzeros_v32i8_and4(<32 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %ymm0, %k0
@@ -2687,6 +3392,38 @@ define i1 @allones_v64i8_and4(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v64i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: cmpq $-1, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v64i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %zmm0, %k0
@@ -2764,6 +3501,37 @@ define i1 @allzeros_v64i8_and4(<64 x i8>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v64i8_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm3
+; KNL-NEXT: vptestmd %zmm3, %zmm3, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: vpcmpeqb %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %edx
+; KNL-NEXT: shll $16, %edx
+; KNL-NEXT: orl %eax, %edx
+; KNL-NEXT: shlq $32, %rdx
+; KNL-NEXT: orq %rcx, %rdx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v64i8_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmb {{.*}}(%rip), %zmm0, %k0
@@ -2801,6 +3569,19 @@ define i1 @allones_v8i16_and4(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v8i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %xmm0, %k0
@@ -2837,6 +3618,19 @@ define i1 @allzeros_v8i16_and4(<8 x i16>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpcmpeqw %xmm1, %xmm0, %xmm0
+; KNL-NEXT: vpmovsxwq %xmm0, %zmm0
+; KNL-NEXT: vptestmq %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %xmm0, %k0
@@ -2894,6 +3688,18 @@ define i1 @allones_v16i16_and4(<16 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %ymm0, %k0
@@ -2973,6 +3779,26 @@ define i1 @allones_v32i16_and4(<32 x i16
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v32i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: cmpl $-1, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v32i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %zmm0, %k0
@@ -3050,6 +3876,25 @@ define i1 @allzeros_v32i16_and4(<32 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v32i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm2 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm2, %ymm1, %ymm1
+; KNL-NEXT: vpand %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vpcmpeqw %ymm2, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: shll $16, %ecx
+; KNL-NEXT: orl %eax, %ecx
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v32i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %zmm0, %k0
@@ -3108,6 +3953,18 @@ define i1 @allzeros_v16i16_and4(<16 x i1
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i16_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vpand %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpcmpeqw %ymm1, %ymm0, %ymm0
+; KNL-NEXT: vpmovsxwd %ymm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i16_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmw {{.*}}(%rip), %ymm0, %k0
@@ -3139,6 +3996,18 @@ define i1 @allones_v4i32_and4(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v4i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
@@ -3172,6 +4041,17 @@ define i1 @allzeros_v4i32_and4(<4 x i32>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
@@ -3223,6 +4103,17 @@ define i1 @allones_v8i32_and4(<8 x i32>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v8i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4]
@@ -3274,6 +4165,17 @@ define i1 @allzeros_v8i32_and4(<8 x i32>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4]
@@ -3352,6 +4254,15 @@ define i1 @allones_v16i32_and4(<16 x i32
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v16i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vpbroadcastd {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: setb %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v16i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
@@ -3430,6 +4341,15 @@ define i1 @allzeros_v16i32_and4(<16 x i3
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v16i32_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vpbroadcastd {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
+; KNL-NEXT: vptestmd %zmm1, %zmm0, %k0
+; KNL-NEXT: kortestw %k0, %k0
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v16i32_and4:
; SKX: # %bb.0:
; SKX-NEXT: vpbroadcastd {{.*#+}} zmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4]
@@ -3462,6 +4382,18 @@ define i1 @allones_v2i64_and4(<2 x i64>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allones_v2i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $3, %al
+; KNL-NEXT: cmpb $3, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v2i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip), %xmm0, %k0
@@ -3494,6 +4426,17 @@ define i1 @allzeros_v2i64_and4(<2 x i64>
; AVX-NEXT: sete %al
; AVX-NEXT: retq
;
+; KNL-LABEL: allzeros_v2i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $3, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v2i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip), %xmm0, %k0
@@ -3547,6 +4490,18 @@ define i1 @allones_v4i64_and4(<4 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v4i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andb $15, %al
+; KNL-NEXT: cmpb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v4i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to4}, %ymm0, %k0
@@ -3602,6 +4557,17 @@ define i1 @allzeros_v4i64_and4(<4 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v4i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; KNL-NEXT: vptestmq %zmm1, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb $15, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v4i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to4}, %ymm0, %k0
@@ -3687,6 +4653,15 @@ define i1 @allones_v8i64_and4(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allones_v8i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: cmpb $-1, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allones_v8i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
@@ -3771,6 +4746,15 @@ define i1 @allzeros_v8i64_and4(<8 x i64>
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: allzeros_v8i64_and4:
+; KNL: # %bb.0:
+; KNL-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: testb %al, %al
+; KNL-NEXT: sete %al
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: allzeros_v8i64_and4:
; SKX: # %bb.0:
; SKX-NEXT: vptestmq {{.*}}(%rip){1to8}, %zmm0, %k0
@@ -3799,6 +4783,16 @@ define i32 @movmskpd(<2 x double> %x) {
; AVX-NEXT: vmovmskpd %xmm0, %eax
; AVX-NEXT: retq
;
+; KNL-LABEL: movmskpd:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andl $3, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskpd:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %xmm0, %k0
@@ -3823,6 +4817,16 @@ define i32 @movmskps(<4 x float> %x) {
; AVX-NEXT: vmovmskps %xmm0, %eax
; AVX-NEXT: retq
;
+; KNL-LABEL: movmskps:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andl $15, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskps:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %xmm0, %k0
@@ -3868,6 +4872,16 @@ define i32 @movmskpd256(<4 x double> %x)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: movmskpd256:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtq %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: andl $15, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskpd256:
; SKX: # %bb.0:
; SKX-NEXT: vpmovq2m %ymm0, %k0
@@ -3901,6 +4915,16 @@ define i32 @movmskps256(<8 x float> %x)
; AVX-NEXT: vzeroupper
; AVX-NEXT: retq
;
+; KNL-LABEL: movmskps256:
+; KNL: # %bb.0:
+; KNL-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtd %zmm0, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: movzbl %al, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskps256:
; SKX: # %bb.0:
; SKX-NEXT: vpmovd2m %ymm0, %k0
@@ -3925,6 +4949,16 @@ define i32 @movmskb(<16 x i8> %x) {
; AVX-NEXT: vpmovmskb %xmm0, %eax
; AVX-NEXT: retq
;
+; KNL-LABEL: movmskb:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskb:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %xmm0, %k0
@@ -3964,6 +4998,22 @@ define i32 @movmskb256(<32 x i8> %x) {
; AVX2-NEXT: vzeroupper
; AVX2-NEXT: retq
;
+; KNL-LABEL: movmskb256:
+; KNL: # %bb.0:
+; KNL-NEXT: vpxor %xmm1, %xmm1, %xmm1
+; KNL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm1
+; KNL-NEXT: vptestmd %zmm1, %zmm1, %k0
+; KNL-NEXT: kmovw %k0, %ecx
+; KNL-NEXT: vextracti128 $1, %ymm0, %xmm0
+; KNL-NEXT: vpmovsxbd %xmm0, %zmm0
+; KNL-NEXT: vptestmd %zmm0, %zmm0, %k0
+; KNL-NEXT: kmovw %k0, %eax
+; KNL-NEXT: shll $16, %eax
+; KNL-NEXT: orl %ecx, %eax
+; KNL-NEXT: vzeroupper
+; KNL-NEXT: retq
+;
; SKX-LABEL: movmskb256:
; SKX: # %bb.0:
; SKX-NEXT: vpmovb2m %ymm0, %k0