[llvm] da92c12 - [X86] combineConcatVectorOps - add ADD/SUB/MUL handling
Simon Pilgrim via llvm-commits
llvm-commits at lists.llvm.org
Thu Mar 9 03:19:18 PST 2023
Author: Simon Pilgrim
Date: 2023-03-09T11:19:07Z
New Revision: da92c120ca6e0a03869dff5f1c5291331ae63587
URL: https://github.com/llvm/llvm-project/commit/da92c120ca6e0a03869dff5f1c5291331ae63587
DIFF: https://github.com/llvm/llvm-project/commit/da92c120ca6e0a03869dff5f1c5291331ae63587.diff
LOG: [X86] combineConcatVectorOps - add ADD/SUB/MUL handling
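The new cases fold concat_vectors(add(x0,y0), add(x1,y1)) into add(concat(x0,x1), concat(y0,y1)), and likewise for SUB/MUL, when the wider operation is available: 256-bit results require AVX2 (hasInt256), and 512-bit results require AVX-512 registers plus BWI for element types narrower than 32 bits. As a rough illustration only (this function is hypothetical and not part of this commit or its tests), IR of the following shape is the kind expected to benefit:

define <8 x i32> @concat_add(<4 x i32> %a0, <4 x i32> %a1, <4 x i32> %b0, <4 x i32> %b1) {
  ; Two 128-bit adds whose results are concatenated into a 256-bit vector.
  %add0 = add <4 x i32> %a0, %b0
  %add1 = add <4 x i32> %a1, %b1
  %res = shufflevector <4 x i32> %add0, <4 x i32> %add1, <8 x i32> <i32 0, i32 1, i32 2, i32 3, i32 4, i32 5, i32 6, i32 7>
  ret <8 x i32> %res
}

With AVX2 the concatenation is now expected to pull the two xmm adds into a single ymm add, mirroring the vpmuludq change in mulvi32.ll below.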
Added:
Modified:
llvm/lib/Target/X86/X86ISelLowering.cpp
llvm/test/CodeGen/X86/mulvi32.ll
llvm/test/CodeGen/X86/prefer-avx256-lzcnt.ll
llvm/test/CodeGen/X86/vector-lzcnt-256.ll
llvm/test/CodeGen/X86/vector-lzcnt-512.ll
Removed:
################################################################################
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 355c5be568b3..a93eb810cdf8 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -56119,6 +56119,20 @@ static SDValue combineConcatVectorOps(const SDLoc &DL, MVT VT,
ConcatSubOperand(VT, Ops, 1), Op0.getOperand(2));
}
break;
+ case ISD::ADD:
+ case ISD::SUB:
+ case ISD::MUL:
+ if (!IsSplat && ((VT.is256BitVector() && Subtarget.hasInt256()) ||
+ (VT.is512BitVector() && Subtarget.useAVX512Regs() &&
+ (EltSizeInBits >= 32 || Subtarget.useBWIRegs())))) {
+ MVT SrcVT = Op0.getOperand(0).getSimpleValueType();
+ SrcVT = MVT::getVectorVT(SrcVT.getScalarType(),
+ NumOps * SrcVT.getVectorNumElements());
+ return DAG.getNode(Op0.getOpcode(), DL, VT,
+ ConcatSubOperand(SrcVT, Ops, 0),
+ ConcatSubOperand(SrcVT, Ops, 1));
+ }
+ break;
case ISD::FADD:
case ISD::FSUB:
case ISD::FMUL:
diff --git a/llvm/test/CodeGen/X86/mulvi32.ll b/llvm/test/CodeGen/X86/mulvi32.ll
index 226ec4f63a1b..88bc356d5695 100644
--- a/llvm/test/CodeGen/X86/mulvi32.ll
+++ b/llvm/test/CodeGen/X86/mulvi32.ll
@@ -288,11 +288,11 @@ define <4 x i64> @_mul4xi32toi64c(<4 x i32>, <4 x i32>) {
; AVX2: # %bb.0:
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
-; AVX2-NEXT: vpmuludq %xmm3, %xmm2, %xmm2
; AVX2-NEXT: vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
; AVX2-NEXT: vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
-; AVX2-NEXT: vpmuludq %xmm1, %xmm0, %xmm0
+; AVX2-NEXT: vinserti128 $1, %xmm1, %ymm3, %ymm1
; AVX2-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX2-NEXT: vpmuludq %ymm1, %ymm0, %ymm0
; AVX2-NEXT: retq
%lower0 = shufflevector <4 x i32> %0, <4 x i32> undef, <4 x i32> <i32 0, i32 undef, i32 1, i32 undef>
%lower1 = shufflevector <4 x i32> %1, <4 x i32> undef, <4 x i32> <i32 0, i32 undef, i32 1, i32 undef>
diff --git a/llvm/test/CodeGen/X86/prefer-avx256-lzcnt.ll b/llvm/test/CodeGen/X86/prefer-avx256-lzcnt.ll
index 0655cc9c9f47..056aeaba9271 100644
--- a/llvm/test/CodeGen/X86/prefer-avx256-lzcnt.ll
+++ b/llvm/test/CodeGen/X86/prefer-avx256-lzcnt.ll
@@ -64,17 +64,15 @@ define <16 x i8> @testv16i8(<16 x i8> %in) {
define <16 x i16> @testv16i16(<16 x i16> %in) {
; AVX256-LABEL: testv16i16:
; AVX256: # %bb.0:
-; AVX256-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX256-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm1[0],zero,xmm1[1],zero,xmm1[2],zero,xmm1[3],zero,xmm1[4],zero,xmm1[5],zero,xmm1[6],zero,xmm1[7],zero
+; AVX256-NEXT: vpmovzxwd {{.*#+}} ymm1 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX256-NEXT: vplzcntd %ymm1, %ymm1
; AVX256-NEXT: vpmovdw %ymm1, %xmm1
-; AVX256-NEXT: vmovdqa {{.*#+}} xmm2 = [16,16,16,16,16,16,16,16]
-; AVX256-NEXT: vpsubw %xmm2, %xmm1, %xmm1
+; AVX256-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX256-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero
; AVX256-NEXT: vplzcntd %ymm0, %ymm0
; AVX256-NEXT: vpmovdw %ymm0, %xmm0
-; AVX256-NEXT: vpsubw %xmm2, %xmm0, %xmm0
-; AVX256-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX256-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX256-NEXT: vpsubw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; AVX256-NEXT: retq
;
; AVX512-LABEL: testv16i16:
@@ -104,17 +102,15 @@ define <32 x i8> @testv32i8(<32 x i8> %in) {
;
; AVX512-LABEL: testv32i8:
; AVX512: # %bb.0:
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
+; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm1, %zmm1
; AVX512-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512-NEXT: vmovdqa {{.*#+}} xmm2 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512-NEXT: vpsubb %xmm2, %xmm1, %xmm1
+; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm0, %zmm0
; AVX512-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512-NEXT: vpsubb %xmm2, %xmm0, %xmm0
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT: vpsubb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; AVX512-NEXT: retq
%out = call <32 x i8> @llvm.ctlz.v32i8(<32 x i8> %in, i1 false)
ret <32 x i8> %out
diff --git a/llvm/test/CodeGen/X86/vector-lzcnt-256.ll b/llvm/test/CodeGen/X86/vector-lzcnt-256.ll
index 7c481b3d2b37..f9cac4ba9714 100644
--- a/llvm/test/CodeGen/X86/vector-lzcnt-256.ll
+++ b/llvm/test/CodeGen/X86/vector-lzcnt-256.ll
@@ -976,17 +976,15 @@ define <32 x i8> @testv32i8(<32 x i8> %in) nounwind {
;
; AVX512-LABEL: testv32i8:
; AVX512: # %bb.0:
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
+; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm1, %zmm1
; AVX512-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512-NEXT: vmovdqa {{.*#+}} xmm2 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512-NEXT: vpsubb %xmm2, %xmm1, %xmm1
+; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm0, %zmm0
; AVX512-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512-NEXT: vpsubb %xmm2, %xmm0, %xmm0
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT: vpsubb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; AVX512-NEXT: retq
;
; X32-AVX-LABEL: testv32i8:
@@ -1070,17 +1068,15 @@ define <32 x i8> @testv32i8u(<32 x i8> %in) nounwind {
;
; AVX512-LABEL: testv32i8u:
; AVX512: # %bb.0:
-; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm1
-; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
+; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm1, %zmm1
; AVX512-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512-NEXT: vmovdqa {{.*#+}} xmm2 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512-NEXT: vpsubb %xmm2, %xmm1, %xmm1
+; AVX512-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512-NEXT: vplzcntd %zmm0, %zmm0
; AVX512-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512-NEXT: vpsubb %xmm2, %xmm0, %xmm0
-; AVX512-NEXT: vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512-NEXT: vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT: vpsubb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
; AVX512-NEXT: retq
;
; X32-AVX-LABEL: testv32i8u:
diff --git a/llvm/test/CodeGen/X86/vector-lzcnt-512.ll b/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
index 5b7cc54c8eb6..78be6dfbff06 100644
--- a/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
+++ b/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
@@ -346,17 +346,15 @@ define <32 x i16> @testv32i16(<32 x i16> %in) nounwind {
;
; AVX512CDBW-LABEL: testv32i16:
; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
+; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512CDBW-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CDBW-NEXT: vpmovdw %zmm1, %ymm1
-; AVX512CDBW-NEXT: vmovdqa {{.*#+}} ymm2 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
-; AVX512CDBW-NEXT: vpsubw %ymm2, %ymm1, %ymm1
+; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm0
; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512CDBW-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CDBW-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512CDBW-NEXT: vpsubw %ymm2, %ymm0, %ymm0
-; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512CDBW-NEXT: vpsubw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm0
; AVX512CDBW-NEXT: retq
;
; AVX512BW-LABEL: testv32i16:
@@ -432,17 +430,15 @@ define <32 x i16> @testv32i16u(<32 x i16> %in) nounwind {
;
; AVX512CDBW-LABEL: testv32i16u:
; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
+; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512CDBW-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CDBW-NEXT: vpmovdw %zmm1, %ymm1
-; AVX512CDBW-NEXT: vmovdqa {{.*#+}} ymm2 = [16,16,16,16,16,16,16,16,16,16,16,16,16,16,16,16]
-; AVX512CDBW-NEXT: vpsubw %ymm2, %ymm1, %ymm1
+; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm0
; AVX512CDBW-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
; AVX512CDBW-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CDBW-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512CDBW-NEXT: vpsubw %ymm2, %ymm0, %ymm0
-; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512CDBW-NEXT: vpsubw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm0
; AVX512CDBW-NEXT: retq
;
; AVX512BW-LABEL: testv32i16u:
@@ -504,55 +500,49 @@ define <64 x i8> @testv64i8(<64 x i8> %in) nounwind {
; AVX512CD-LABEL: testv64i8:
; AVX512CD: # %bb.0:
; AVX512CD-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CD-NEXT: vextracti128 $1, %ymm1, %xmm2
-; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CD-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm3 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CD-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CD-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm1, %xmm1
-; AVX512CD-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vextracti128 $1, %ymm0, %xmm2
-; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
-; AVX512CD-NEXT: vplzcntd %zmm2, %zmm2
-; AVX512CD-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CD-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
+; AVX512CD-NEXT: vpsubb %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm3 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
+; AVX512CD-NEXT: vplzcntd %zmm3, %zmm3
+; AVX512CD-NEXT: vpmovdb %zmm3, %xmm3
+; AVX512CD-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CD-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm0, %xmm0
-; AVX512CD-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
+; AVX512CD-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512CD-NEXT: vpsubb %ymm2, %ymm0, %ymm0
; AVX512CD-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512CD-NEXT: retq
;
; AVX512CDBW-LABEL: testv64i8:
; AVX512CDBW: # %bb.0:
; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CDBW-NEXT: vextracti128 $1, %ymm1, %xmm2
-; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CDBW-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CDBW-NEXT: vmovdqa {{.*#+}} xmm3 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CDBW-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CDBW-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512CDBW-NEXT: vextracti128 $1, %ymm0, %xmm2
-; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CDBW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CDBW-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CDBW-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CDBW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm0, %xmm0
-; AVX512CDBW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
+; AVX512CDBW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpsubb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm0
; AVX512CDBW-NEXT: retq
;
; AVX512BW-LABEL: testv64i8:
@@ -598,55 +588,49 @@ define <64 x i8> @testv64i8u(<64 x i8> %in) nounwind {
; AVX512CD-LABEL: testv64i8u:
; AVX512CD: # %bb.0:
; AVX512CD-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CD-NEXT: vextracti128 $1, %ymm1, %xmm2
-; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CD-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm3 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CD-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CD-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm1, %xmm1
-; AVX512CD-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vextracti128 $1, %ymm0, %xmm2
-; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
-; AVX512CD-NEXT: vplzcntd %zmm2, %zmm2
-; AVX512CD-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CD-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
+; AVX512CD-NEXT: vpsubb %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm3 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
+; AVX512CD-NEXT: vplzcntd %zmm3, %zmm3
+; AVX512CD-NEXT: vpmovdb %zmm3, %xmm3
+; AVX512CD-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512CD-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CD-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CD-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512CD-NEXT: vpsubb %xmm3, %xmm0, %xmm0
-; AVX512CD-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
+; AVX512CD-NEXT: vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512CD-NEXT: vpsubb %ymm2, %ymm0, %ymm0
; AVX512CD-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
; AVX512CD-NEXT: retq
;
; AVX512CDBW-LABEL: testv64i8u:
; AVX512CDBW: # %bb.0:
; AVX512CDBW-NEXT: vextracti64x4 $1, %zmm0, %ymm1
-; AVX512CDBW-NEXT: vextracti128 $1, %ymm1, %xmm2
-; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CDBW-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CDBW-NEXT: vmovdqa {{.*#+}} xmm3 = [24,24,24,24,24,24,24,24,24,24,24,24,24,24,24,24]
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CDBW-NEXT: vextracti128 $1, %ymm1, %xmm1
; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm1 = xmm1[0],zero,zero,zero,xmm1[1],zero,zero,zero,xmm1[2],zero,zero,zero,xmm1[3],zero,zero,zero,xmm1[4],zero,zero,zero,xmm1[5],zero,zero,zero,xmm1[6],zero,zero,zero,xmm1[7],zero,zero,zero,xmm1[8],zero,zero,zero,xmm1[9],zero,zero,zero,xmm1[10],zero,zero,zero,xmm1[11],zero,zero,zero,xmm1[12],zero,zero,zero,xmm1[13],zero,zero,zero,xmm1[14],zero,zero,zero,xmm1[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm1, %zmm1
; AVX512CDBW-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512CDBW-NEXT: vextracti128 $1, %ymm0, %xmm2
-; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero,xmm2[2],zero,zero,zero,xmm2[3],zero,zero,zero,xmm2[4],zero,zero,zero,xmm2[5],zero,zero,zero,xmm2[6],zero,zero,zero,xmm2[7],zero,zero,zero,xmm2[8],zero,zero,zero,xmm2[9],zero,zero,zero,xmm2[10],zero,zero,zero,xmm2[11],zero,zero,zero,xmm2[12],zero,zero,zero,xmm2[13],zero,zero,zero,xmm2[14],zero,zero,zero,xmm2[15],zero,zero,zero
+; AVX512CDBW-NEXT: vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm2 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm2, %zmm2
; AVX512CDBW-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm2, %xmm2
+; AVX512CDBW-NEXT: vextracti128 $1, %ymm0, %xmm0
; AVX512CDBW-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero
; AVX512CDBW-NEXT: vplzcntd %zmm0, %zmm0
; AVX512CDBW-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512CDBW-NEXT: vpsubb %xmm3, %xmm0, %xmm0
-; AVX512CDBW-NEXT: vinserti128 $1, %xmm2, %ymm0, %ymm0
+; AVX512CDBW-NEXT: vinserti128 $1, %xmm0, %ymm2, %ymm0
; AVX512CDBW-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpsubb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm0
; AVX512CDBW-NEXT: retq
;
; AVX512BW-LABEL: testv64i8u: