[llvm] 4023329 - [X86] collectConcatOps - add ability to recurse through insert_subvector chains

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Tue Apr 9 05:24:04 PDT 2024


Author: Simon Pilgrim
Date: 2024-04-09T13:23:44+01:00
New Revision: 4023329bbfab5f2abc5c035aad05820724a484cf

URL: https://github.com/llvm/llvm-project/commit/4023329bbfab5f2abc5c035aad05820724a484cf
DIFF: https://github.com/llvm/llvm-project/commit/4023329bbfab5f2abc5c035aad05820724a484cf.diff

LOG: [X86] collectConcatOps - add ability to recurse through insert_subvector chains

Allows us to match insert_subvector(insert_subvector(undef, insert_subvector(insert_subvector(undef, x, 0), y, 1), 0),
                                    insert_subvector(insert_subvector(undef, z, 0), w, 1), 2)
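
To make the matched shape concrete, below is a minimal standalone C++ sketch of the
recursion, assuming a toy Node struct in place of SDNode (the struct, this simplified
collectConcatOps and every name in it are hypothetical illustrations, not the LLVM
implementation): for the chain above it flattens the concat down to the leaf operands
x, y, z, w instead of stopping at the two half-width inner concats.

// Toy model of the recursion added by this patch (illustration only).
#include <cstddef>
#include <iostream>
#include <vector>

struct Node {
  bool IsUndef = false;            // stands in for ISD::UNDEF
  bool IsInsertSubvector = false;  // stands in for ISD::INSERT_SUBVECTOR
  const Node *Src = nullptr;       // operand 0: vector being inserted into
  const Node *Sub = nullptr;       // operand 1: subvector being inserted
  std::size_t Idx = 0;             // operand 2: insertion index
  const char *Name = "";           // label for leaf operands
};

// Match insert_subvector(insert_subvector(undef, Lo, 0), Hi, HiIdx) and try to
// recurse into Lo/Hi when they are matching concats themselves.
static bool collectConcatOps(const Node *N, std::vector<const Node *> &Ops) {
  if (!(N->IsInsertSubvector && N->Idx != 0 && N->Src &&
        N->Src->IsInsertSubvector && N->Src->Idx == 0 && N->Src->Src &&
        N->Src->Src->IsUndef))
    return false;
  const Node *Lo = N->Src->Sub;
  const Node *Hi = N->Sub;
  std::vector<const Node *> LoOps, HiOps;
  // Attempt to recurse into inner (matching) concats.
  if (collectConcatOps(Lo, LoOps) && collectConcatOps(Hi, HiOps) &&
      LoOps.size() == HiOps.size()) {
    Ops.insert(Ops.end(), LoOps.begin(), LoOps.end());
    Ops.insert(Ops.end(), HiOps.begin(), HiOps.end());
    return true;
  }
  // Fall back to the two halves when they are not concats themselves.
  Ops.push_back(Lo);
  Ops.push_back(Hi);
  return true;
}

int main() {
  Node Undef{true};
  Node X{false, false, nullptr, nullptr, 0, "x"};
  Node Y{false, false, nullptr, nullptr, 0, "y"};
  Node Z{false, false, nullptr, nullptr, 0, "z"};
  Node W{false, false, nullptr, nullptr, 0, "w"};
  // lo = insert_subvector(insert_subvector(undef, x, 0), y, 1)
  Node LoInner{false, true, &Undef, &X, 0};
  Node LoChain{false, true, &LoInner, &Y, 1};
  // hi = insert_subvector(insert_subvector(undef, z, 0), w, 1)
  Node HiInner{false, true, &Undef, &Z, 0};
  Node HiChain{false, true, &HiInner, &W, 1};
  // top = insert_subvector(insert_subvector(undef, lo, 0), hi, 2)
  Node TopInner{false, true, &Undef, &LoChain, 0};
  Node Top{false, true, &TopInner, &HiChain, 2};
  std::vector<const Node *> Ops;
  if (collectConcatOps(&Top, Ops))
    for (const Node *Op : Ops)
      std::cout << Op->Name << ' ';  // prints: x y z w
  std::cout << '\n';
}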

Added: 
    

Modified: 
    llvm/lib/Target/X86/X86ISelLowering.cpp
    llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-4.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-8.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index d4d7b29596636d..5405c2921e5188 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -3992,7 +3992,6 @@ static bool collectConcatOps(SDNode *N, SmallVectorImpl<SDValue> &Ops,
     EVT VT = Src.getValueType();
     EVT SubVT = Sub.getValueType();
 
-    // TODO - Handle more general insert_subvector chains.
     if (VT.getSizeInBits() == (SubVT.getSizeInBits() * 2)) {
       // insert_subvector(undef, x, lo)
       if (Idx == 0 && Src.isUndef()) {
@@ -4005,8 +4004,19 @@ static bool collectConcatOps(SDNode *N, SmallVectorImpl<SDValue> &Ops,
         if (Src.getOpcode() == ISD::INSERT_SUBVECTOR &&
             Src.getOperand(1).getValueType() == SubVT &&
             isNullConstant(Src.getOperand(2))) {
-          Ops.push_back(Src.getOperand(1));
-          Ops.push_back(Sub);
+          // Attempt to recurse into inner (matching) concats.
+          SDValue Lo = Src.getOperand(1);
+          SDValue Hi = Sub;
+          SmallVector<SDValue, 2> LoOps, HiOps;
+          if (collectConcatOps(Lo.getNode(), LoOps, DAG) &&
+              collectConcatOps(Hi.getNode(), HiOps, DAG) &&
+              LoOps.size() == HiOps.size()) {
+            Ops.append(LoOps);
+            Ops.append(HiOps);
+            return true;
+          }
+          Ops.push_back(Lo);
+          Ops.push_back(Hi);
           return true;
         }
         // insert_subvector(x, extract_subvector(x, lo), hi)

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-4.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-4.ll
index 68b180ef525654..f054c7edfff16a 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-4.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-4.ll
@@ -1589,104 +1589,81 @@ define void @store_i16_stride4_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ;
 ; AVX512-LABEL: store_i16_stride4_vf32:
 ; AVX512:       # %bb.0:
-; AVX512-NEXT:    vmovdqa (%rcx), %xmm0
-; AVX512-NEXT:    vmovdqa 16(%rcx), %xmm2
-; AVX512-NEXT:    vmovdqa 32(%rcx), %xmm9
-; AVX512-NEXT:    vmovdqa 48(%rcx), %xmm5
-; AVX512-NEXT:    vmovdqa (%rdx), %xmm1
-; AVX512-NEXT:    vmovdqa 16(%rdx), %xmm3
-; AVX512-NEXT:    vmovdqa 32(%rdx), %xmm10
-; AVX512-NEXT:    vmovdqa 48(%rdx), %xmm6
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm7 = xmm4[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm4, %ymm7, %ymm4
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm3, %ymm2
-; AVX512-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm13
-; AVX512-NEXT:    vmovdqa (%rsi), %xmm2
-; AVX512-NEXT:    vmovdqa 16(%rsi), %xmm4
-; AVX512-NEXT:    vmovdqa 32(%rsi), %xmm11
-; AVX512-NEXT:    vmovdqa 48(%rsi), %xmm7
-; AVX512-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512-NEXT:    vmovdqa 16(%rdi), %xmm14
-; AVX512-NEXT:    vmovdqa 32(%rdi), %xmm12
-; AVX512-NEXT:    vmovdqa 48(%rdi), %xmm8
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm4[4],xmm14[5],xmm4[5],xmm14[6],xmm4[6],xmm14[7],xmm4[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm14[0],xmm4[0],xmm14[1],xmm4[1],xmm14[2],xmm4[2],xmm14[3],xmm4[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm14 = xmm4[0],zero,xmm4[1],zero
+; AVX512-NEXT:    vmovdqa (%rsi), %xmm0
+; AVX512-NEXT:    vmovdqa 16(%rsi), %xmm1
+; AVX512-NEXT:    vmovdqa 32(%rsi), %xmm10
+; AVX512-NEXT:    vmovdqa 48(%rsi), %xmm6
+; AVX512-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512-NEXT:    vmovdqa 16(%rdi), %xmm3
+; AVX512-NEXT:    vmovdqa 32(%rdi), %xmm11
+; AVX512-NEXT:    vmovdqa 48(%rdi), %xmm7
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm4, %ymm14, %ymm4
-; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm4, %zmm4
+; AVX512-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
+; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm3, %ymm1
+; AVX512-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm1
+; AVX512-NEXT:    vmovdqa (%rcx), %xmm3
+; AVX512-NEXT:    vmovdqa 16(%rcx), %xmm5
+; AVX512-NEXT:    vmovdqa 32(%rcx), %xmm12
+; AVX512-NEXT:    vmovdqa 48(%rcx), %xmm8
+; AVX512-NEXT:    vmovdqa (%rdx), %xmm4
+; AVX512-NEXT:    vmovdqa 16(%rdx), %xmm13
+; AVX512-NEXT:    vmovdqa 32(%rdx), %xmm14
+; AVX512-NEXT:    vmovdqa 48(%rdx), %xmm9
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm5[4],xmm13[5],xmm5[5],xmm13[6],xmm5[6],xmm13[7],xmm5[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
+; AVX512-NEXT:    vpmovsxbd {{.*#+}} zmm5 = [0,0,1,1,2,2,3,3,16,16,17,17,18,18,19,19]
+; AVX512-NEXT:    vpermt2d %zmm15, %zmm5, %zmm13
 ; AVX512-NEXT:    movw $-21846, %ax # imm = 0xAAAA
 ; AVX512-NEXT:    kmovw %eax, %k1
-; AVX512-NEXT:    vmovdqa32 %zmm13, %zmm4 {%k1}
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
+; AVX512-NEXT:    vmovdqa32 %zmm13, %zmm1 {%k1}
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm13[0],zero,xmm13[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm13, %ymm14, %ymm13
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm10 = xmm9[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; AVX512-NEXT:    vinserti64x4 $1, %ymm13, %zmm9, %zmm9
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm13 = xmm10[0],zero,xmm10[1],zero
+; AVX512-NEXT:    vinserti128 $1, %xmm13, %ymm15, %ymm13
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm10, %ymm13, %ymm10
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
+; AVX512-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
+; AVX512-NEXT:    vinserti64x4 $1, %ymm13, %zmm10, %zmm10
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm11 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
+; AVX512-NEXT:    vpermt2d %zmm11, %zmm5, %zmm12
+; AVX512-NEXT:    vmovdqa32 %zmm12, %zmm10 {%k1}
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm11 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
 ; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
 ; AVX512-NEXT:    vinserti128 $1, %xmm11, %ymm12, %ymm11
-; AVX512-NEXT:    vinserti64x4 $1, %ymm10, %zmm11, %zmm10
-; AVX512-NEXT:    vmovdqa32 %zmm9, %zmm10 {%k1}
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm11 = xmm9[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm9, %ymm11, %ymm9
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512-NEXT:    vinserti64x4 $1, %ymm9, %zmm5, %zmm5
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm9 = xmm6[0],zero,xmm6[1],zero
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm6, %ymm9, %ymm6
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
+; AVX512-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
+; AVX512-NEXT:    vinserti64x4 $1, %ymm11, %zmm6, %zmm6
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm7 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
+; AVX512-NEXT:    vpermt2d %zmm7, %zmm5, %zmm8
+; AVX512-NEXT:    vmovdqa32 %zmm8, %zmm6 {%k1}
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm7 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
 ; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
 ; AVX512-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
-; AVX512-NEXT:    vinserti64x4 $1, %ymm6, %zmm7, %zmm6
-; AVX512-NEXT:    vmovdqa32 %zmm5, %zmm6 {%k1}
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm7 = xmm5[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm5, %ymm7, %ymm5
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm1 = xmm0[0,0,1,1]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
-; AVX512-NEXT:    vinserti64x4 $1, %ymm5, %zmm0, %zmm0
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm5 = xmm1[0],zero,xmm1[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm5, %ymm1
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm3, %ymm2
-; AVX512-NEXT:    vinserti64x4 $1, %ymm1, %zmm2, %zmm1
-; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm1 {%k1}
-; AVX512-NEXT:    vmovdqa64 %zmm1, (%r8)
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm0
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
+; AVX512-NEXT:    vpermt2d %zmm2, %zmm5, %zmm3
+; AVX512-NEXT:    vmovdqa32 %zmm3, %zmm0 {%k1}
+; AVX512-NEXT:    vmovdqa64 %zmm0, (%r8)
 ; AVX512-NEXT:    vmovdqa64 %zmm6, 192(%r8)
 ; AVX512-NEXT:    vmovdqa64 %zmm10, 128(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm4, 64(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm1, 64(%r8)
 ; AVX512-NEXT:    vzeroupper
 ; AVX512-NEXT:    retq
 ;
@@ -1772,100 +1749,81 @@ define void @store_i16_stride4_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ;
 ; AVX512DQ-LABEL: store_i16_stride4_vf32:
 ; AVX512DQ:       # %bb.0:
-; AVX512DQ-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512DQ-NEXT:    vmovdqa 16(%rsi), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 32(%rsi), %xmm7
-; AVX512DQ-NEXT:    vmovdqa 48(%rsi), %xmm5
+; AVX512DQ-NEXT:    vmovdqa (%rsi), %xmm0
+; AVX512DQ-NEXT:    vmovdqa 16(%rsi), %xmm1
+; AVX512DQ-NEXT:    vmovdqa 32(%rsi), %xmm10
+; AVX512DQ-NEXT:    vmovdqa 48(%rsi), %xmm6
 ; AVX512DQ-NEXT:    vmovdqa (%rdi), %xmm2
 ; AVX512DQ-NEXT:    vmovdqa 16(%rdi), %xmm3
-; AVX512DQ-NEXT:    vmovdqa 32(%rdi), %xmm10
-; AVX512DQ-NEXT:    vmovdqa 48(%rdi), %xmm6
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm8 = xmm4[0],zero,xmm4[1],zero
+; AVX512DQ-NEXT:    vmovdqa 32(%rdi), %xmm11
+; AVX512DQ-NEXT:    vmovdqa 48(%rdi), %xmm7
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm4, %ymm8, %ymm4
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm1[0],zero,xmm1[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm1, %ymm3, %ymm1
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm1
 ; AVX512DQ-NEXT:    vmovdqa (%rcx), %xmm3
-; AVX512DQ-NEXT:    vmovdqa 16(%rcx), %xmm13
-; AVX512DQ-NEXT:    vmovdqa 32(%rcx), %xmm11
+; AVX512DQ-NEXT:    vmovdqa 16(%rcx), %xmm5
+; AVX512DQ-NEXT:    vmovdqa 32(%rcx), %xmm12
 ; AVX512DQ-NEXT:    vmovdqa 48(%rcx), %xmm8
 ; AVX512DQ-NEXT:    vmovdqa (%rdx), %xmm4
-; AVX512DQ-NEXT:    vmovdqa 16(%rdx), %xmm14
-; AVX512DQ-NEXT:    vmovdqa 32(%rdx), %xmm12
+; AVX512DQ-NEXT:    vmovdqa 16(%rdx), %xmm13
+; AVX512DQ-NEXT:    vmovdqa 32(%rdx), %xmm14
 ; AVX512DQ-NEXT:    vmovdqa 48(%rdx), %xmm9
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm14 = xmm13[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm13, %ymm14, %ymm13
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm5[4],xmm13[5],xmm5[5],xmm13[6],xmm5[6],xmm13[7],xmm5[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm13[0],xmm5[0],xmm13[1],xmm5[1],xmm13[2],xmm5[2],xmm13[3],xmm5[3]
+; AVX512DQ-NEXT:    vpmovsxbd {{.*#+}} zmm5 = [0,0,1,1,2,2,3,3,16,16,17,17,18,18,19,19]
+; AVX512DQ-NEXT:    vpermt2d %zmm15, %zmm5, %zmm13
 ; AVX512DQ-NEXT:    movw $-21846, %ax # imm = 0xAAAA
 ; AVX512DQ-NEXT:    kmovw %eax, %k1
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm15, %zmm13, %zmm0 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm10[4],xmm7[4],xmm10[5],xmm7[5],xmm10[6],xmm7[6],xmm10[7],xmm7[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm14 = xmm13[0],zero,xmm13[1],zero
+; AVX512DQ-NEXT:    vmovdqa32 %zmm13, %zmm1 {%k1}
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm13[0],zero,xmm13[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm13, %ymm14, %ymm13
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm10[0],xmm7[0],xmm10[1],xmm7[1],xmm10[2],xmm7[2],xmm10[3],xmm7[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm10 = xmm7[0],zero,xmm7[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm7, %ymm10, %ymm7
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm13, %zmm7, %zmm7
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm10[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm10, %ymm13, %ymm10
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm12 = xmm11[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm11, %ymm12, %ymm11
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm10, %zmm11, %zmm7 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm13, %ymm15, %ymm13
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
 ; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm11 = xmm10[0],zero,xmm10[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm10 = xmm10[2,2,3,3]
 ; AVX512DQ-NEXT:    vinserti128 $1, %xmm10, %ymm11, %ymm10
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm10 = xmm6[0,0,1,1]
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm13, %zmm10, %zmm10
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm11 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm11, %zmm5, %zmm12
+; AVX512DQ-NEXT:    vmovdqa32 %zmm12, %zmm10 {%k1}
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm11 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm12 = xmm11[0],zero,xmm11[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm11, %ymm12, %ymm11
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm10, %ymm6
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm11, %zmm6, %zmm6
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm7 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
 ; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm9 = xmm8[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm8, %ymm9, %ymm8
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm6, %zmm8, %zmm5 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm8 = xmm6[0],zero,xmm6[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm8, %ymm6
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm6, %zmm1, %zmm1
+; AVX512DQ-NEXT:    vpermt2d %zmm7, %zmm5, %zmm8
+; AVX512DQ-NEXT:    vmovdqa32 %zmm8, %zmm6 {%k1}
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm7 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm8 = xmm7[0],zero,xmm7[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm7, %ymm8, %ymm7
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm2[0],xmm0[0],xmm2[1],xmm0[1],xmm2[2],xmm0[2],xmm2[3],xmm0[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm0[0],zero,xmm0[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm0
 ; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm2[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm2, %ymm6, %ymm2
 ; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm3, %ymm4, %ymm3
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm2, %zmm3, %zmm1 {%k1}
-; AVX512DQ-NEXT:    vmovdqa64 %zmm1, (%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm5, 192(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm7, 128(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm0, 64(%r8)
+; AVX512DQ-NEXT:    vpermt2d %zmm2, %zmm5, %zmm3
+; AVX512DQ-NEXT:    vmovdqa32 %zmm3, %zmm0 {%k1}
+; AVX512DQ-NEXT:    vmovdqa64 %zmm0, (%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm6, 192(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm10, 128(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm1, 64(%r8)
 ; AVX512DQ-NEXT:    vzeroupper
 ; AVX512DQ-NEXT:    retq
 ;
@@ -3112,201 +3070,155 @@ define void @store_i16_stride4_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ;
 ; AVX512-LABEL: store_i16_stride4_vf64:
 ; AVX512:       # %bb.0:
-; AVX512-NEXT:    vmovdqa64 (%rcx), %xmm20
-; AVX512-NEXT:    vmovdqa 16(%rcx), %xmm2
-; AVX512-NEXT:    vmovdqa 32(%rcx), %xmm10
-; AVX512-NEXT:    vmovdqa 48(%rcx), %xmm5
-; AVX512-NEXT:    vmovdqa (%rdx), %xmm1
-; AVX512-NEXT:    vmovdqa 16(%rdx), %xmm3
-; AVX512-NEXT:    vmovdqa 32(%rdx), %xmm11
-; AVX512-NEXT:    vmovdqa 48(%rdx), %xmm6
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm2[4],xmm3[5],xmm2[5],xmm3[6],xmm2[6],xmm3[7],xmm2[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm7 = xmm4[0,0,1,1]
+; AVX512-NEXT:    vmovdqa64 (%rsi), %xmm19
+; AVX512-NEXT:    vmovdqa 16(%rsi), %xmm0
+; AVX512-NEXT:    vmovdqa 32(%rsi), %xmm11
+; AVX512-NEXT:    vmovdqa 48(%rsi), %xmm6
+; AVX512-NEXT:    vmovdqa64 (%rdi), %xmm20
+; AVX512-NEXT:    vmovdqa 16(%rdi), %xmm3
+; AVX512-NEXT:    vmovdqa 32(%rdi), %xmm12
+; AVX512-NEXT:    vmovdqa 48(%rdi), %xmm7
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm4, %ymm7, %ymm4
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm2[0],xmm3[1],xmm2[1],xmm3[2],xmm2[2],xmm3[3],xmm2[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm2[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm3, %ymm2
-; AVX512-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm9
-; AVX512-NEXT:    vmovdqa (%rsi), %xmm2
-; AVX512-NEXT:    vmovdqa 16(%rsi), %xmm3
-; AVX512-NEXT:    vmovdqa 32(%rsi), %xmm12
-; AVX512-NEXT:    vmovdqa 48(%rsi), %xmm7
-; AVX512-NEXT:    vmovdqa (%rdi), %xmm4
-; AVX512-NEXT:    vmovdqa 16(%rdi), %xmm14
-; AVX512-NEXT:    vmovdqa 32(%rdi), %xmm13
-; AVX512-NEXT:    vmovdqa 48(%rdi), %xmm8
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm3[4],xmm14[5],xmm3[5],xmm14[6],xmm3[6],xmm14[7],xmm3[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm14[0],xmm3[0],xmm14[1],xmm3[1],xmm14[2],xmm3[2],xmm14[3],xmm3[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm14 = xmm3[0],zero,xmm3[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm3, %ymm14, %ymm3
-; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm3, %zmm17
+; AVX512-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm18
+; AVX512-NEXT:    vmovdqa (%rcx), %xmm3
+; AVX512-NEXT:    vmovdqa 16(%rcx), %xmm5
+; AVX512-NEXT:    vmovdqa 32(%rcx), %xmm13
+; AVX512-NEXT:    vmovdqa 48(%rcx), %xmm9
+; AVX512-NEXT:    vmovdqa (%rdx), %xmm4
+; AVX512-NEXT:    vmovdqa 16(%rdx), %xmm8
+; AVX512-NEXT:    vmovdqa 32(%rdx), %xmm14
+; AVX512-NEXT:    vmovdqa 48(%rdx), %xmm10
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
+; AVX512-NEXT:    vpmovsxbd {{.*#+}} zmm5 = [0,0,1,1,2,2,3,3,16,16,17,17,18,18,19,19]
+; AVX512-NEXT:    vpermt2d %zmm15, %zmm5, %zmm8
 ; AVX512-NEXT:    movw $-21846, %ax # imm = 0xAAAA
 ; AVX512-NEXT:    kmovw %eax, %k1
-; AVX512-NEXT:    vmovdqa32 %zmm9, %zmm17 {%k1}
-; AVX512-NEXT:    vmovdqa 96(%rcx), %xmm9
-; AVX512-NEXT:    vmovdqa 96(%rdx), %xmm14
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm9[4],xmm14[5],xmm9[5],xmm14[6],xmm9[6],xmm14[7],xmm9[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm14[0],xmm9[0],xmm14[1],xmm9[1],xmm14[2],xmm9[2],xmm14[3],xmm9[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm14 = xmm9[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm9, %ymm14, %ymm9
-; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm9, %zmm14
-; AVX512-NEXT:    vmovdqa 96(%rsi), %xmm9
+; AVX512-NEXT:    vmovdqa32 %zmm8, %zmm18 {%k1}
+; AVX512-NEXT:    vmovdqa 96(%rsi), %xmm8
 ; AVX512-NEXT:    vmovdqa 96(%rdi), %xmm15
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm9[4],xmm15[5],xmm9[5],xmm15[6],xmm9[6],xmm15[7],xmm9[7]
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm8[4],xmm15[5],xmm8[5],xmm15[6],xmm8[6],xmm15[7],xmm8[7]
 ; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
 ; AVX512-NEXT:    vinserti32x4 $1, %xmm0, %ymm16, %ymm0
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm9[0],zero,xmm9[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm9, %ymm15, %ymm9
-; AVX512-NEXT:    vinserti64x4 $1, %ymm0, %zmm9, %zmm18
-; AVX512-NEXT:    vmovdqa32 %zmm14, %zmm18 {%k1}
-; AVX512-NEXT:    vmovdqa 112(%rcx), %xmm0
-; AVX512-NEXT:    vmovdqa 112(%rdx), %xmm14
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm14[0],xmm0[0],xmm14[1],xmm0[1],xmm14[2],xmm0[2],xmm14[3],xmm0[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm14 = xmm0[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm14, %ymm0
-; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm0
-; AVX512-NEXT:    vmovdqa 112(%rsi), %xmm14
-; AVX512-NEXT:    vmovdqa 112(%rdi), %xmm15
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm3[0],zero,xmm3[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm3, %ymm16, %ymm3
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm14 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm14[0],zero,xmm14[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm14 = xmm14[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm14, %ymm15, %ymm14
-; AVX512-NEXT:    vinserti64x4 $1, %ymm3, %zmm14, %zmm19
-; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm19 {%k1}
-; AVX512-NEXT:    vmovdqa 64(%rcx), %xmm0
-; AVX512-NEXT:    vmovdqa 64(%rdx), %xmm3
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm8[0],zero,xmm8[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
+; AVX512-NEXT:    vinserti128 $1, %xmm8, %ymm15, %ymm8
+; AVX512-NEXT:    vinserti64x4 $1, %ymm0, %zmm8, %zmm8
+; AVX512-NEXT:    vmovdqa 96(%rcx), %xmm0
+; AVX512-NEXT:    vmovdqa 96(%rdx), %xmm15
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
+; AVX512-NEXT:    vpermt2d %zmm1, %zmm5, %zmm0
+; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm8 {%k1}
+; AVX512-NEXT:    vmovdqa 112(%rsi), %xmm0
+; AVX512-NEXT:    vmovdqa 112(%rdi), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
 ; AVX512-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
-; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm0
-; AVX512-NEXT:    vmovdqa 64(%rsi), %xmm3
-; AVX512-NEXT:    vmovdqa 64(%rdi), %xmm15
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm15[4],xmm3[4],xmm15[5],xmm3[5],xmm15[6],xmm3[6],xmm15[7],xmm3[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm9[0],zero,xmm9[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm9, %ymm16, %ymm9
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm15[0],xmm3[0],xmm15[1],xmm3[1],xmm15[2],xmm3[2],xmm15[3],xmm3[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm3[0],zero,xmm3[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm3, %ymm15, %ymm3
-; AVX512-NEXT:    vinserti64x4 $1, %ymm9, %zmm3, %zmm15
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm15
+; AVX512-NEXT:    vmovdqa 112(%rcx), %xmm0
+; AVX512-NEXT:    vmovdqa 112(%rdx), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
 ; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm15 {%k1}
-; AVX512-NEXT:    vmovdqa 80(%rcx), %xmm0
-; AVX512-NEXT:    vmovdqa 80(%rdx), %xmm3
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm16 = xmm9[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm9, %ymm16, %ymm9
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
+; AVX512-NEXT:    vmovdqa 64(%rsi), %xmm0
+; AVX512-NEXT:    vmovdqa 64(%rdi), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm2[0],zero,xmm2[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512-NEXT:    vinserti32x4 $1, %xmm2, %ymm16, %ymm2
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
-; AVX512-NEXT:    vinserti64x4 $1, %ymm9, %zmm0, %zmm0
-; AVX512-NEXT:    vmovdqa 80(%rsi), %xmm3
-; AVX512-NEXT:    vmovdqa 80(%rdi), %xmm9
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm9[4],xmm3[4],xmm9[5],xmm3[5],xmm9[6],xmm3[6],xmm9[7],xmm3[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm14[0],zero,xmm14[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm14 = xmm14[2,2,3,3]
-; AVX512-NEXT:    vinserti32x4 $1, %xmm14, %ymm16, %ymm14
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm9[0],xmm3[0],xmm9[1],xmm3[1],xmm9[2],xmm3[2],xmm9[3],xmm3[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm9 = xmm3[0],zero,xmm3[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm3, %ymm9, %ymm3
-; AVX512-NEXT:    vinserti64x4 $1, %ymm14, %zmm3, %zmm16
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm16
+; AVX512-NEXT:    vmovdqa 64(%rcx), %xmm0
+; AVX512-NEXT:    vmovdqa 64(%rdx), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
 ; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm16 {%k1}
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm0[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm3[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm3, %ymm9, %ymm3
-; AVX512-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm0
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm13[4],xmm12[4],xmm13[5],xmm12[5],xmm13[6],xmm12[6],xmm13[7],xmm12[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm9 = xmm3[0],zero,xmm3[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm3, %ymm9, %ymm3
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm10 = xmm9[0],zero,xmm9[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm9, %ymm10, %ymm9
-; AVX512-NEXT:    vinserti64x4 $1, %ymm3, %zmm9, %zmm3
-; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm3 {%k1}
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm6[4],xmm5[4],xmm6[5],xmm5[5],xmm6[6],xmm5[6],xmm6[7],xmm5[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm9 = xmm0[0,0,1,1]
+; AVX512-NEXT:    vmovdqa 80(%rsi), %xmm0
+; AVX512-NEXT:    vmovdqa 80(%rdi), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm17 = xmm2[0],zero,xmm2[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512-NEXT:    vinserti32x4 $1, %xmm2, %ymm17, %ymm2
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm9, %ymm0
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm6[0],xmm5[0],xmm6[1],xmm5[1],xmm6[2],xmm5[2],xmm6[3],xmm5[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512-NEXT:    vinserti64x4 $1, %ymm0, %zmm5, %zmm0
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm7[4],xmm8[5],xmm7[5],xmm8[6],xmm7[6],xmm8[7],xmm7[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm8[0],xmm7[0],xmm8[1],xmm7[1],xmm8[2],xmm7[2],xmm8[3],xmm7[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; AVX512-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm5
-; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm5 {%k1}
-; AVX512-NEXT:    vmovdqa64 %xmm20, %xmm7
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm7[4],xmm1[5],xmm7[5],xmm1[6],xmm7[6],xmm1[7],xmm7[7]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm17
+; AVX512-NEXT:    vmovdqa 80(%rcx), %xmm0
+; AVX512-NEXT:    vmovdqa 80(%rdx), %xmm1
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
+; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm17 {%k1}
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm7[0],xmm1[1],xmm7[1],xmm1[2],xmm7[2],xmm1[3],xmm7[3]
-; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm1[0,0,1,1]
+; AVX512-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm6, %ymm1
+; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm1
 ; AVX512-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm1[0],zero,xmm1[1],zero
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
+; AVX512-NEXT:    vpermt2d %zmm1, %zmm5, %zmm2
+; AVX512-NEXT:    vmovdqa32 %zmm2, %zmm0 {%k1}
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm6, %ymm1
-; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm4 = xmm2[0],zero,xmm2[1],zero
+; AVX512-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
 ; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm4, %ymm2
+; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm6, %ymm2
 ; AVX512-NEXT:    vinserti64x4 $1, %ymm1, %zmm2, %zmm1
-; AVX512-NEXT:    vmovdqa32 %zmm0, %zmm1 {%k1}
-; AVX512-NEXT:    vmovdqa64 %zmm1, (%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm5, 192(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm3, 128(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm16, 320(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm15, 256(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm19, 448(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm18, 384(%r8)
-; AVX512-NEXT:    vmovdqa64 %zmm17, 64(%r8)
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
+; AVX512-NEXT:    vpermt2d %zmm2, %zmm5, %zmm6
+; AVX512-NEXT:    vmovdqa32 %zmm6, %zmm1 {%k1}
+; AVX512-NEXT:    vmovdqa64 %xmm19, %xmm7
+; AVX512-NEXT:    vmovdqa64 %xmm20, %xmm9
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512-NEXT:    vinserti128 $1, %xmm2, %ymm6, %ymm2
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
+; AVX512-NEXT:    vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
+; AVX512-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
+; AVX512-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
+; AVX512-NEXT:    vinserti64x4 $1, %ymm2, %zmm6, %zmm2
+; AVX512-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
+; AVX512-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
+; AVX512-NEXT:    vpermt2d %zmm6, %zmm5, %zmm3
+; AVX512-NEXT:    vmovdqa32 %zmm3, %zmm2 {%k1}
+; AVX512-NEXT:    vmovdqa64 %zmm2, (%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm1, 192(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm0, 128(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm17, 320(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm16, 256(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm15, 448(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm8, 384(%r8)
+; AVX512-NEXT:    vmovdqa64 %zmm18, 64(%r8)
 ; AVX512-NEXT:    vzeroupper
 ; AVX512-NEXT:    retq
 ;
@@ -3466,192 +3378,155 @@ define void @store_i16_stride4_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ;
 ; AVX512DQ-LABEL: store_i16_stride4_vf64:
 ; AVX512DQ:       # %bb.0:
-; AVX512DQ-NEXT:    vmovdqa (%rsi), %xmm1
+; AVX512DQ-NEXT:    vmovdqa64 (%rsi), %xmm19
 ; AVX512DQ-NEXT:    vmovdqa 16(%rsi), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 32(%rsi), %xmm10
-; AVX512DQ-NEXT:    vmovdqa 48(%rsi), %xmm5
-; AVX512DQ-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512DQ-NEXT:    vmovdqa 32(%rsi), %xmm11
+; AVX512DQ-NEXT:    vmovdqa 48(%rsi), %xmm6
+; AVX512DQ-NEXT:    vmovdqa64 (%rdi), %xmm20
 ; AVX512DQ-NEXT:    vmovdqa 16(%rdi), %xmm3
-; AVX512DQ-NEXT:    vmovdqa 32(%rdi), %xmm11
+; AVX512DQ-NEXT:    vmovdqa 32(%rdi), %xmm12
 ; AVX512DQ-NEXT:    vmovdqa 48(%rdi), %xmm7
 ; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm4[0],zero,xmm4[1],zero
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm5 = xmm4[0],zero,xmm4[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm4 = xmm4[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm4, %ymm6, %ymm4
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm4, %ymm5, %ymm4
 ; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
 ; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm3 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
 ; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm17
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm18
 ; AVX512DQ-NEXT:    vmovdqa (%rcx), %xmm3
-; AVX512DQ-NEXT:    vmovdqa 16(%rcx), %xmm6
-; AVX512DQ-NEXT:    vmovdqa 32(%rcx), %xmm12
-; AVX512DQ-NEXT:    vmovdqa 48(%rcx), %xmm8
+; AVX512DQ-NEXT:    vmovdqa 16(%rcx), %xmm5
+; AVX512DQ-NEXT:    vmovdqa 32(%rcx), %xmm13
+; AVX512DQ-NEXT:    vmovdqa 48(%rcx), %xmm9
 ; AVX512DQ-NEXT:    vmovdqa (%rdx), %xmm4
-; AVX512DQ-NEXT:    vmovdqa 16(%rdx), %xmm13
+; AVX512DQ-NEXT:    vmovdqa 16(%rdx), %xmm8
 ; AVX512DQ-NEXT:    vmovdqa 32(%rdx), %xmm14
-; AVX512DQ-NEXT:    vmovdqa 48(%rdx), %xmm9
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm6[4],xmm13[5],xmm6[5],xmm13[6],xmm6[6],xmm13[7],xmm6[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm16 = xmm15[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm6[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm13, %ymm6
+; AVX512DQ-NEXT:    vmovdqa 48(%rdx), %xmm10
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm8[4],xmm5[4],xmm8[5],xmm5[5],xmm8[6],xmm5[6],xmm8[7],xmm5[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm5[0],xmm8[1],xmm5[1],xmm8[2],xmm5[2],xmm8[3],xmm5[3]
+; AVX512DQ-NEXT:    vpmovsxbd {{.*#+}} zmm5 = [0,0,1,1,2,2,3,3,16,16,17,17,18,18,19,19]
+; AVX512DQ-NEXT:    vpermt2d %zmm15, %zmm5, %zmm8
 ; AVX512DQ-NEXT:    movw $-21846, %ax # imm = 0xAAAA
 ; AVX512DQ-NEXT:    kmovw %eax, %k1
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm15, %zmm6, %zmm17 {%k1}
-; AVX512DQ-NEXT:    vmovdqa 96(%rsi), %xmm6
-; AVX512DQ-NEXT:    vmovdqa 96(%rdi), %xmm13
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm6[4],xmm13[5],xmm6[5],xmm13[6],xmm6[6],xmm13[7],xmm6[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm13[0],xmm6[0],xmm13[1],xmm6[1],xmm13[2],xmm6[2],xmm13[3],xmm6[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm13 = xmm6[0],zero,xmm6[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm13, %ymm6
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm15, %zmm6, %zmm18
-; AVX512DQ-NEXT:    vmovdqa 96(%rcx), %xmm13
-; AVX512DQ-NEXT:    vmovdqa 96(%rdx), %xmm15
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm13[4],xmm15[5],xmm13[5],xmm15[6],xmm13[6],xmm15[7],xmm13[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm16 = xmm0[0,0,1,1]
+; AVX512DQ-NEXT:    vmovdqa32 %zmm8, %zmm18 {%k1}
+; AVX512DQ-NEXT:    vmovdqa 96(%rsi), %xmm8
+; AVX512DQ-NEXT:    vmovdqa 96(%rdi), %xmm15
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm8[4],xmm15[5],xmm8[5],xmm15[6],xmm8[6],xmm15[7],xmm8[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
 ; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm0, %ymm16, %ymm0
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm15[0],xmm13[0],xmm15[1],xmm13[1],xmm15[2],xmm13[2],xmm15[3],xmm13[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm13[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm13, %ymm15, %ymm13
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm0, %zmm13, %zmm18 {%k1}
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm15[0],xmm8[0],xmm15[1],xmm8[1],xmm15[2],xmm8[2],xmm15[3],xmm8[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm15 = xmm8[0],zero,xmm8[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm8, %ymm15, %ymm8
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm8, %zmm8
+; AVX512DQ-NEXT:    vmovdqa 96(%rcx), %xmm0
+; AVX512DQ-NEXT:    vmovdqa 96(%rdx), %xmm15
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm1, %zmm5, %zmm0
+; AVX512DQ-NEXT:    vmovdqa32 %zmm0, %zmm8 {%k1}
 ; AVX512DQ-NEXT:    vmovdqa 112(%rsi), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 112(%rdi), %xmm13
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm13[4],xmm0[4],xmm13[5],xmm0[5],xmm13[6],xmm0[6],xmm13[7],xmm0[7]
+; AVX512DQ-NEXT:    vmovdqa 112(%rdi), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
 ; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
 ; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm13[0],xmm0[0],xmm13[1],xmm0[1],xmm13[2],xmm0[2],xmm13[3],xmm0[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm13 = xmm0[0],zero,xmm0[1],zero
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm13, %ymm0
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm19
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm15
 ; AVX512DQ-NEXT:    vmovdqa 112(%rcx), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 112(%rdx), %xmm15
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm15[4],xmm0[4],xmm15[5],xmm0[5],xmm15[6],xmm0[6],xmm15[7],xmm0[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm16 = xmm6[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm6, %ymm16, %ymm6
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm0[0],xmm15[1],xmm0[1],xmm15[2],xmm0[2],xmm15[3],xmm0[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm0[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm15, %ymm0
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm6, %zmm0, %zmm19 {%k1}
+; AVX512DQ-NEXT:    vmovdqa 112(%rdx), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
+; AVX512DQ-NEXT:    vmovdqa32 %zmm0, %zmm15 {%k1}
 ; AVX512DQ-NEXT:    vmovdqa 64(%rsi), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 64(%rdi), %xmm6
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm15[0],zero,xmm15[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm15, %ymm16, %ymm15
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
+; AVX512DQ-NEXT:    vmovdqa 64(%rdi), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm2[0],zero,xmm2[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm2, %ymm16, %ymm2
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm15, %zmm0, %zmm15
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm16
 ; AVX512DQ-NEXT:    vmovdqa 64(%rcx), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 64(%rdx), %xmm6
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm16 = xmm13[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm13, %ymm16, %ymm13
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm13, %zmm0, %zmm15 {%k1}
+; AVX512DQ-NEXT:    vmovdqa 64(%rdx), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
+; AVX512DQ-NEXT:    vmovdqa32 %zmm0, %zmm16 {%k1}
 ; AVX512DQ-NEXT:    vmovdqa 80(%rsi), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 80(%rdi), %xmm6
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm16 = xmm13[0],zero,xmm13[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm13, %ymm16, %ymm13
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
+; AVX512DQ-NEXT:    vmovdqa 80(%rdi), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm17 = xmm2[0],zero,xmm2[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm2, %ymm17, %ymm2
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm13, %zmm0, %zmm16
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm17
 ; AVX512DQ-NEXT:    vmovdqa 80(%rcx), %xmm0
-; AVX512DQ-NEXT:    vmovdqa 80(%rdx), %xmm6
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm13 = xmm6[4],xmm0[4],xmm6[5],xmm0[5],xmm6[6],xmm0[6],xmm6[7],xmm0[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm20 = xmm13[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti32x4 $1, %xmm13, %ymm20, %ymm13
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm6[0],xmm0[0],xmm6[1],xmm0[1],xmm6[2],xmm0[2],xmm6[3],xmm0[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm13, %zmm0, %zmm16 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm10 = xmm6[0],zero,xmm6[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm10, %ymm6
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm6, %zmm10
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm14[4],xmm12[4],xmm14[5],xmm12[5],xmm14[6],xmm12[6],xmm14[7],xmm12[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm0[0,0,1,1]
+; AVX512DQ-NEXT:    vmovdqa 80(%rdx), %xmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm2, %zmm5, %zmm0
+; AVX512DQ-NEXT:    vmovdqa32 %zmm0, %zmm17 {%k1}
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm1 = xmm0[0],zero,xmm0[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm14[0],xmm12[0],xmm14[1],xmm12[1],xmm14[2],xmm12[2],xmm14[3],xmm12[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm11 = xmm6[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm11, %ymm6
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm0, %zmm6, %zmm10 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm7[4],xmm5[4],xmm7[5],xmm5[5],xmm7[6],xmm5[6],xmm7[7],xmm5[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm0[0],zero,xmm0[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm6, %ymm0
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm7[0],xmm5[0],xmm7[1],xmm5[1],xmm7[2],xmm5[2],xmm7[3],xmm5[3]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm5, %zmm0
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm9[4],xmm8[4],xmm9[5],xmm8[5],xmm9[6],xmm8[6],xmm9[7],xmm8[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm5[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm8[0],xmm9[1],xmm8[1],xmm9[2],xmm8[2],xmm9[3],xmm8[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm7 = xmm6[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm5, %zmm6, %zmm0 {%k1}
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm5[0],zero,xmm5[1],zero
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm5, %ymm6, %ymm5
-; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm0, %ymm1, %ymm0
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3]
 ; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
 ; AVX512DQ-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm1
-; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm1
-; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm5 = xmm2[0,0,1,1]
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm1, %zmm5, %zmm2
+; AVX512DQ-NEXT:    vmovdqa32 %zmm2, %zmm0 {%k1}
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm2 = xmm1[0],zero,xmm1[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm1, %ymm2, %ymm1
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm2, %ymm6, %ymm2
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm1, %zmm2, %zmm1
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm10[4],xmm9[4],xmm10[5],xmm9[5],xmm10[6],xmm9[6],xmm10[7],xmm9[7]
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm10[0],xmm9[0],xmm10[1],xmm9[1],xmm10[2],xmm9[2],xmm10[3],xmm9[3]
+; AVX512DQ-NEXT:    vpermt2d %zmm2, %zmm5, %zmm6
+; AVX512DQ-NEXT:    vmovdqa32 %zmm6, %zmm1 {%k1}
+; AVX512DQ-NEXT:    vmovdqa64 %xmm19, %xmm7
+; AVX512DQ-NEXT:    vmovdqa64 %xmm20, %xmm9
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm6 = xmm2[0],zero,xmm2[1],zero
 ; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm2, %ymm5, %ymm2
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm2, %ymm6, %ymm2
+; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3]
+; AVX512DQ-NEXT:    vpmovzxdq {{.*#+}} xmm7 = xmm6[0],zero,xmm6[1],zero
+; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm6 = xmm6[2,2,3,3]
+; AVX512DQ-NEXT:    vinserti128 $1, %xmm6, %ymm7, %ymm6
+; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm2, %zmm6, %zmm2
+; AVX512DQ-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
 ; AVX512DQ-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm4 = xmm3[0,0,1,1]
-; AVX512DQ-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[2,2,3,3]
-; AVX512DQ-NEXT:    vinserti128 $1, %xmm3, %ymm4, %ymm3
-; AVX512DQ-NEXT:    vinserti32x8 $1, %ymm2, %zmm3, %zmm1 {%k1}
-; AVX512DQ-NEXT:    vmovdqa64 %zmm1, (%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm0, 192(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm10, 128(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm16, 320(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm15, 256(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm19, 448(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm18, 384(%r8)
-; AVX512DQ-NEXT:    vmovdqa64 %zmm17, 64(%r8)
+; AVX512DQ-NEXT:    vpermt2d %zmm6, %zmm5, %zmm3
+; AVX512DQ-NEXT:    vmovdqa32 %zmm3, %zmm2 {%k1}
+; AVX512DQ-NEXT:    vmovdqa64 %zmm2, (%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm1, 192(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm0, 128(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm17, 320(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm16, 256(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm15, 448(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm8, 384(%r8)
+; AVX512DQ-NEXT:    vmovdqa64 %zmm18, 64(%r8)
 ; AVX512DQ-NEXT:    vzeroupper
 ; AVX512DQ-NEXT:    retq
 ;

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-8.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-8.ll
index d8fd21f4877f01..a343df8428fb41 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-8.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-8.ll
@@ -1151,24 +1151,25 @@ define void @store_i8_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecp
 ; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
 ; AVX512BW-NEXT:    vinserti128 $1, %xmm3, %ymm2, %ymm2
 ; AVX512BW-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm1
 ; AVX512BW-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm1 = zmm0[0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u]
+; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm3 = zmm0[0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u]
 ; AVX512BW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,0,1,6,7,4,5]
 ; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u,u,u]
 ; AVX512BW-NEXT:    movl $287445282, %ecx # imm = 0x11221122
 ; AVX512BW-NEXT:    kmovd %ecx, %k1
-; AVX512BW-NEXT:    vmovdqu16 %zmm0, %zmm1 {%k1}
-; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm2[2,3,0,1,2,3,0,1]
-; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u]
-; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm2[0,1,2,3,0,1,2,3]
-; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm2 = zmm2[u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63]
+; AVX512BW-NEXT:    vmovdqu16 %zmm0, %zmm3 {%k1}
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[4,5,6,7,4,5,6,7]
+; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63]
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm2[2,3,0,1,2,3,0,1]
+; AVX512BW-NEXT:    vpshufb {{.*#+}} zmm1 = zmm1[u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u]
 ; AVX512BW-NEXT:    movl $1149781128, %ecx # imm = 0x44884488
 ; AVX512BW-NEXT:    kmovd %ecx, %k1
-; AVX512BW-NEXT:    vmovdqu16 %zmm0, %zmm2 {%k1}
+; AVX512BW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
 ; AVX512BW-NEXT:    movw $-21846, %cx # imm = 0xAAAA
 ; AVX512BW-NEXT:    kmovd %ecx, %k1
-; AVX512BW-NEXT:    vmovdqa32 %zmm2, %zmm1 {%k1}
-; AVX512BW-NEXT:    vmovdqa64 %zmm1, (%rax)
+; AVX512BW-NEXT:    vmovdqa32 %zmm0, %zmm3 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, (%rax)
 ; AVX512BW-NEXT:    vzeroupper
 ; AVX512BW-NEXT:    retq
 ;
@@ -1224,24 +1225,25 @@ define void @store_i8_stride8_vf8(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vecp
 ; AVX512DQ-BW-NEXT:    vpunpcklqdq {{.*#+}} xmm3 = xmm4[0],xmm3[0]
 ; AVX512DQ-BW-NEXT:    vinserti128 $1, %xmm3, %ymm2, %ymm2
 ; AVX512DQ-BW-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX512DQ-BW-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm1
 ; AVX512DQ-BW-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm1 = zmm0[0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u]
+; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm3 = zmm0[0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u]
 ; AVX512DQ-BW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,0,1,6,7,4,5]
 ; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u,u,u,u,u]
 ; AVX512DQ-BW-NEXT:    movl $287445282, %ecx # imm = 0x11221122
 ; AVX512DQ-BW-NEXT:    kmovd %ecx, %k1
-; AVX512DQ-BW-NEXT:    vmovdqu16 %zmm0, %zmm1 {%k1}
-; AVX512DQ-BW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm2[2,3,0,1,2,3,0,1]
-; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u]
-; AVX512DQ-BW-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm2[0,1,2,3,0,1,2,3]
-; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm2 = zmm2[u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63]
+; AVX512DQ-BW-NEXT:    vmovdqu16 %zmm0, %zmm3 {%k1}
+; AVX512DQ-BW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[4,5,6,7,4,5,6,7]
+; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,u,u,u,u,54,62,u,u,u,u,u,u,55,63]
+; AVX512DQ-BW-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm2[2,3,0,1,2,3,0,1]
+; AVX512DQ-BW-NEXT:    vpshufb {{.*#+}} zmm1 = zmm1[u,u,u,u,u,u,0,8,u,u,u,u,u,u,1,9,u,u,u,u,18,26,u,u,u,u,u,u,19,27,u,u,u,u,u,u,u,u,36,44,u,u,u,u,u,u,37,45,u,u,u,u,54,62,u,u,u,u,u,u,55,63,u,u]
 ; AVX512DQ-BW-NEXT:    movl $1149781128, %ecx # imm = 0x44884488
 ; AVX512DQ-BW-NEXT:    kmovd %ecx, %k1
-; AVX512DQ-BW-NEXT:    vmovdqu16 %zmm0, %zmm2 {%k1}
+; AVX512DQ-BW-NEXT:    vmovdqu16 %zmm1, %zmm0 {%k1}
 ; AVX512DQ-BW-NEXT:    movw $-21846, %cx # imm = 0xAAAA
 ; AVX512DQ-BW-NEXT:    kmovd %ecx, %k1
-; AVX512DQ-BW-NEXT:    vmovdqa32 %zmm2, %zmm1 {%k1}
-; AVX512DQ-BW-NEXT:    vmovdqa64 %zmm1, (%rax)
+; AVX512DQ-BW-NEXT:    vmovdqa32 %zmm0, %zmm3 {%k1}
+; AVX512DQ-BW-NEXT:    vmovdqa64 %zmm3, (%rax)
 ; AVX512DQ-BW-NEXT:    vzeroupper
 ; AVX512DQ-BW-NEXT:    retq
 ;
@@ -8870,234 +8872,172 @@ define void @store_i8_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP:       # %bb.0:
 ; AVX512BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512BW-FCP-NEXT:    vmovdqa (%r10), %xmm0
-; AVX512BW-FCP-NEXT:    vmovdqa 16(%r10), %xmm14
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r10), %xmm18
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r10), %xmm17
-; AVX512BW-FCP-NEXT:    vmovdqa (%rax), %xmm1
-; AVX512BW-FCP-NEXT:    vmovdqa 16(%rax), %xmm15
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rax), %xmm19
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rax), %xmm20
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm2, %ymm2, %ymm2
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm2, %zmm3
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7,0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm3, %zmm6
-; AVX512BW-FCP-NEXT:    vmovdqa (%r9), %xmm3
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r9), %xmm21
-; AVX512BW-FCP-NEXT:    vmovdqa (%r8), %xmm4
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r8), %xmm22
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm5, %ymm5, %ymm5
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm5, %zmm5, %zmm7
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7,0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm7, %zmm16
-; AVX512BW-FCP-NEXT:    movl $-2004318072, %eax # imm = 0x88888888
-; AVX512BW-FCP-NEXT:    kmovd %eax, %k1
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm6, %zmm16 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa (%rcx), %xmm6
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rcx), %xmm23
+; AVX512BW-FCP-NEXT:    vmovdqa (%rsi), %xmm0
+; AVX512BW-FCP-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm20
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rsi), %xmm17
+; AVX512BW-FCP-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rdi), %xmm21
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rdi), %xmm18
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
+; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm3
+; AVX512BW-FCP-NEXT:    vpmovsxwq {{.*#+}} ymm4 = [2312,2826,3340,3854]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm3, %ymm3
+; AVX512BW-FCP-NEXT:    vpmovsxwq {{.*#+}} xmm5 = [1284,1798]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm6
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm6, %ymm0, %ymm0
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm1
+; AVX512BW-FCP-NEXT:    vmovdqa (%rcx), %xmm3
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm22
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rcx), %xmm19
 ; AVX512BW-FCP-NEXT:    vmovdqa (%rdx), %xmm7
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm8, %ymm8, %ymm8
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm8, %zmm8, %zmm8
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm9 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15,8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm8, %zmm24
-; AVX512BW-FCP-NEXT:    vmovdqa (%rsi), %xmm10
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rsi), %xmm25
-; AVX512BW-FCP-NEXT:    vmovdqa (%rdi), %xmm11
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rdi), %xmm28
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm8, %ymm8, %ymm13
-; AVX512BW-FCP-NEXT:    vpmovsxwq {{.*#+}} ymm12 = [2312,2826,3340,3854]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm13, %ymm26
-; AVX512BW-FCP-NEXT:    vpmovsxwq {{.*#+}} xmm13 = [1284,1798]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm8, %xmm27
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm8, %ymm8
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm8, %zmm8
-; AVX512BW-FCP-NEXT:    movl $572662306, %eax # imm = 0x22222222
-; AVX512BW-FCP-NEXT:    kmovd %eax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm24, %zmm8 {%k2}
-; AVX512BW-FCP-NEXT:    movw $-21846, %ax # imm = 0xAAAA
-; AVX512BW-FCP-NEXT:    kmovd %eax, %k3
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm16, %zmm8 {%k3}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm20[0],xmm17[0],xmm20[1],xmm17[1],xmm20[2],xmm17[2],xmm20[3],xmm17[3],xmm20[4],xmm17[4],xmm20[5],xmm17[5],xmm20[6],xmm17[6],xmm20[7],xmm17[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm16, %ymm16, %ymm16
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm16, %zmm16
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm16, %zmm16
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm22[0],xmm21[0],xmm22[1],xmm21[1],xmm22[2],xmm21[2],xmm22[3],xmm21[3],xmm22[4],xmm21[4],xmm22[5],xmm21[5],xmm22[6],xmm21[6],xmm22[7],xmm21[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm24, %ymm24, %ymm24
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm24, %zmm24
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm24, %zmm24
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm16, %zmm24 {%k1}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm28[0],xmm25[0],xmm28[1],xmm25[1],xmm28[2],xmm25[2],xmm28[3],xmm25[3],xmm28[4],xmm25[4],xmm28[5],xmm25[5],xmm28[6],xmm25[6],xmm28[7],xmm25[7]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm16, %xmm26
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm27 = xmm16[0],zero,zero,zero,xmm16[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm27, %ymm26
-; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rdx), %xmm30
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm16, %ymm16, %ymm16
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm16, %ymm16
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm26, %zmm16
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm26 = xmm30[0],xmm23[0],xmm30[1],xmm23[1],xmm30[2],xmm23[2],xmm30[3],xmm23[3],xmm30[4],xmm23[4],xmm30[5],xmm23[5],xmm30[6],xmm23[6],xmm30[7],xmm23[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm26, %ymm26
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm26, %zmm26
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm26, %zmm26
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm26, %zmm16 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm26
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm16 {%k3}
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm27
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm20[8],xmm17[8],xmm20[9],xmm17[9],xmm20[10],xmm17[10],xmm20[11],xmm17[11],xmm20[12],xmm17[12],xmm20[13],xmm17[13],xmm20[14],xmm17[14],xmm20[15],xmm17[15]
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm24
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm22[8],xmm21[8],xmm22[9],xmm21[9],xmm22[10],xmm21[10],xmm22[11],xmm21[11],xmm22[12],xmm21[12],xmm22[13],xmm21[13],xmm22[14],xmm21[14],xmm22[15],xmm21[15]
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm21
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rdx), %xmm23
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rdx), %xmm24
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm7[8],xmm3[8],xmm7[9],xmm3[9],xmm7[10],xmm3[10],xmm7[11],xmm3[11],xmm7[12],xmm3[12],xmm7[13],xmm3[13],xmm7[14],xmm3[14],xmm7[15],xmm3[15]
+; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm8 = [0,0,2,1,2,1,6,7,0,2,2,3,2,3,6,7,4,4,2,3,6,5,6,5,4,6,2,3,6,7,6,7]
+; AVX512BW-FCP-NEXT:    movl $572662306, %r11d # imm = 0x22222222
+; AVX512BW-FCP-NEXT:    kmovd %r11d, %k1
+; AVX512BW-FCP-NEXT:    vpermw %zmm6, %zmm8, %zmm1 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa (%r10), %xmm6
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r10), %xmm25
+; AVX512BW-FCP-NEXT:    vmovdqa (%rax), %xmm9
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%rax), %xmm26
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm15 = xmm9[8],xmm6[8],xmm9[9],xmm6[9],xmm9[10],xmm6[10],xmm9[11],xmm6[11],xmm9[12],xmm6[12],xmm9[13],xmm6[13],xmm9[14],xmm6[14],xmm9[15],xmm6[15]
+; AVX512BW-FCP-NEXT:    vmovdqa (%r9), %xmm10
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r9), %xmm27
+; AVX512BW-FCP-NEXT:    vmovdqa (%r8), %xmm11
+; AVX512BW-FCP-NEXT:    vmovdqa64 48(%r8), %xmm28
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm12 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
+; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm13 = [0,1,0,1,4,5,1,3,2,1,2,1,4,5,3,3,0,1,4,5,4,5,5,7,0,1,6,5,6,5,7,7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm12, %zmm13, %zmm12
+; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm14 = [0,0,0,0,4,5,2,1,0,2,0,2,4,5,2,3,0,1,4,4,4,4,6,5,0,1,4,6,4,6,6,7]
+; AVX512BW-FCP-NEXT:    movl $-2004318072, %r11d # imm = 0x88888888
+; AVX512BW-FCP-NEXT:    kmovd %r11d, %k2
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm14, %zmm12 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm15, %ymm15, %ymm16
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm16, %ymm16
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm15, %xmm29
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm15 = xmm15[0],zero,zero,zero,xmm15[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm29, %ymm15, %ymm15
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm15, %zmm15
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm24[0],xmm19[0],xmm24[1],xmm19[1],xmm24[2],xmm19[2],xmm24[3],xmm19[3],xmm24[4],xmm19[4],xmm24[5],xmm19[5],xmm24[6],xmm19[6],xmm24[7],xmm19[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm8, %zmm15 {%k1}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm29 = xmm26[0],xmm25[0],xmm26[1],xmm25[1],xmm26[2],xmm25[2],xmm26[3],xmm25[3],xmm26[4],xmm25[4],xmm26[5],xmm25[5],xmm26[6],xmm25[6],xmm26[7],xmm25[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm28[0],xmm27[0],xmm28[1],xmm27[1],xmm28[2],xmm27[2],xmm28[3],xmm27[3],xmm28[4],xmm27[4],xmm28[5],xmm27[5],xmm28[6],xmm27[6],xmm28[7],xmm27[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm13, %zmm16
+; AVX512BW-FCP-NEXT:    vpermw %zmm29, %zmm14, %zmm16 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm18[8],xmm17[8],xmm18[9],xmm17[9],xmm18[10],xmm17[10],xmm18[11],xmm17[11],xmm18[12],xmm17[12],xmm18[13],xmm17[13],xmm18[14],xmm17[14],xmm18[15],xmm17[15]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm17, %xmm18
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm29 = xmm17[0],zero,zero,zero,xmm17[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm29, %ymm18
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r10), %xmm29
 ; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm17
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm17, %zmm17
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm17, %zmm17
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm20, %zmm20
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm20, %zmm20
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm17, %zmm20 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rdi), %xmm29
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm30[8],xmm23[8],xmm30[9],xmm23[9],xmm30[10],xmm23[10],xmm30[11],xmm23[11],xmm30[12],xmm23[12],xmm30[13],xmm23[13],xmm30[14],xmm23[14],xmm30[15],xmm23[15]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm17
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm17, %zmm17
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm17, %zmm22
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm28[8],xmm25[8],xmm28[9],xmm25[9],xmm28[10],xmm25[10],xmm28[11],xmm25[11],xmm28[12],xmm25[12],xmm28[13],xmm25[13],xmm28[14],xmm25[14],xmm28[15],xmm25[15]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm23
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm23, %ymm23
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm17, %xmm25
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm17 = xmm17[0],zero,zero,zero,xmm17[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm25, %ymm17, %ymm17
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm23, %zmm17, %zmm17
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm22, %zmm17 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm20, %zmm17 {%k3}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm20 = xmm19[0],xmm18[0],xmm19[1],xmm18[1],xmm19[2],xmm18[2],xmm19[3],xmm18[3],xmm19[4],xmm18[4],xmm19[5],xmm18[5],xmm19[6],xmm18[6],xmm19[7],xmm18[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm20, %zmm20
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm20, %zmm20
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm22 = xmm27[0],xmm26[0],xmm27[1],xmm26[1],xmm27[2],xmm26[2],xmm27[3],xmm26[3],xmm27[4],xmm26[4],xmm27[5],xmm26[5],xmm27[6],xmm26[6],xmm27[7],xmm26[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm22, %ymm22
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm22, %zmm22, %zmm22
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm22, %zmm22
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm20, %zmm22 {%k1}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm20 = xmm29[0],xmm21[0],xmm29[1],xmm21[1],xmm29[2],xmm21[2],xmm29[3],xmm21[3],xmm29[4],xmm21[4],xmm29[5],xmm21[5],xmm29[6],xmm21[6],xmm29[7],xmm21[7]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm20, %xmm23
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm17, %ymm17
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm17
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rax), %xmm30
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm24[8],xmm19[8],xmm24[9],xmm19[9],xmm24[10],xmm19[10],xmm24[11],xmm19[11],xmm24[12],xmm19[12],xmm24[13],xmm19[13],xmm24[14],xmm19[14],xmm24[15],xmm19[15]
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm31
+; AVX512BW-FCP-NEXT:    vpermw %zmm18, %zmm8, %zmm17 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa 32(%r8), %xmm0
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm19 = xmm26[8],xmm25[8],xmm26[9],xmm25[9],xmm26[10],xmm25[10],xmm26[11],xmm25[11],xmm26[12],xmm25[12],xmm26[13],xmm25[13],xmm26[14],xmm25[14],xmm26[15],xmm25[15]
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm28[8],xmm27[8],xmm28[9],xmm27[9],xmm28[10],xmm27[10],xmm28[11],xmm27[11],xmm28[12],xmm27[12],xmm28[13],xmm27[13],xmm28[14],xmm27[14],xmm28[15],xmm27[15]
+; AVX512BW-FCP-NEXT:    vpermw %zmm18, %zmm13, %zmm18
+; AVX512BW-FCP-NEXT:    vpermw %zmm19, %zmm14, %zmm18 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm21[0],xmm20[0],xmm21[1],xmm20[1],xmm21[2],xmm20[2],xmm21[3],xmm20[3],xmm21[4],xmm20[4],xmm21[5],xmm20[5],xmm21[6],xmm20[6],xmm21[7],xmm20[7]
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm19, %ymm19, %ymm24
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm24, %ymm24
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm19, %xmm25
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm19 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm25, %ymm19, %ymm19
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm19, %zmm19
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm24, %zmm8, %zmm19 {%k1}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm25 = xmm30[0],xmm29[0],xmm30[1],xmm29[1],xmm30[2],xmm29[2],xmm30[3],xmm29[3],xmm30[4],xmm29[4],xmm30[5],xmm29[5],xmm30[6],xmm29[6],xmm30[7],xmm29[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm0[0],xmm31[0],xmm0[1],xmm31[1],xmm0[2],xmm31[2],xmm0[3],xmm31[3],xmm0[4],xmm31[4],xmm0[5],xmm31[5],xmm0[6],xmm31[6],xmm0[7],xmm31[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm24, %zmm13, %zmm24
+; AVX512BW-FCP-NEXT:    vpermw %zmm25, %zmm14, %zmm24 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm21[8],xmm20[8],xmm21[9],xmm20[9],xmm21[10],xmm20[10],xmm21[11],xmm20[11],xmm21[12],xmm20[12],xmm21[13],xmm20[13],xmm21[14],xmm20[14],xmm21[15],xmm20[15]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm20, %xmm21
 ; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm25 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm25, %ymm23
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rdx), %xmm28
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm25, %ymm21
 ; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm20, %ymm20
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm23, %zmm20
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm23 = xmm28[0],xmm24[0],xmm28[1],xmm24[1],xmm28[2],xmm24[2],xmm28[3],xmm24[3],xmm28[4],xmm24[4],xmm28[5],xmm24[5],xmm28[6],xmm24[6],xmm28[7],xmm24[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm23, %ymm23
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm23, %zmm23, %zmm23
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm23, %zmm23
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm23, %zmm20 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%r9), %xmm23
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm22, %zmm20 {%k3}
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%r8), %xmm25
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm19[8],xmm18[8],xmm19[9],xmm18[9],xmm19[10],xmm18[10],xmm19[11],xmm18[11],xmm19[12],xmm18[12],xmm19[13],xmm18[13],xmm19[14],xmm18[14],xmm19[15],xmm18[15]
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rcx), %xmm19
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm26 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rsi), %xmm22
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm18
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm18, %zmm18, %zmm18
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm18, %zmm18
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm26, %ymm26
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm26, %zmm26
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm26, %zmm27
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm18, %zmm27 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rdi), %xmm26
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm28[8],xmm24[8],xmm28[9],xmm24[9],xmm28[10],xmm24[10],xmm28[11],xmm24[11],xmm28[12],xmm24[12],xmm28[13],xmm24[13],xmm28[14],xmm24[14],xmm28[15],xmm24[15]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm18
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm18, %zmm18, %zmm18
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm18, %zmm24
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm29[8],xmm21[8],xmm29[9],xmm21[9],xmm29[10],xmm21[10],xmm29[11],xmm21[11],xmm29[12],xmm21[12],xmm29[13],xmm21[13],xmm29[14],xmm21[14],xmm29[15],xmm21[15]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm21
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm21, %ymm21
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm18, %xmm28
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm18 = xmm18[0],zero,zero,zero,xmm18[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm28, %ymm18, %ymm18
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm18, %zmm18
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm24, %zmm18 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm27, %zmm18 {%k3}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm21 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm21, %ymm21
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm21, %zmm21
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm21, %zmm21
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm25[0],xmm23[0],xmm25[1],xmm23[1],xmm25[2],xmm23[2],xmm25[3],xmm23[3],xmm25[4],xmm23[4],xmm25[5],xmm23[5],xmm25[6],xmm23[6],xmm25[7],xmm23[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm24, %ymm24, %ymm24
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm24, %zmm24
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm24, %zmm24
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm21, %zmm24 {%k1}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm21 = xmm26[0],xmm22[0],xmm26[1],xmm22[1],xmm26[2],xmm22[2],xmm26[3],xmm22[3],xmm26[4],xmm22[4],xmm26[5],xmm22[5],xmm26[6],xmm22[6],xmm26[7],xmm22[7]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm21, %xmm27
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm28 = xmm21[0],zero,zero,zero,xmm21[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm28, %ymm27
-; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rdx), %xmm28
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm21, %ymm21
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm21, %ymm21
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm27, %zmm21
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm27 = xmm28[0],xmm19[0],xmm28[1],xmm19[1],xmm28[2],xmm19[2],xmm28[3],xmm19[3],xmm28[4],xmm19[4],xmm28[5],xmm19[5],xmm28[6],xmm19[6],xmm28[7],xmm19[7]
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm27, %ymm27
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm27, %zmm27, %zmm27
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm27, %zmm27
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm27, %zmm21 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm21 {%k3}
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm15 = xmm25[8],xmm23[8],xmm25[9],xmm23[9],xmm25[10],xmm23[10],xmm25[11],xmm23[11],xmm25[12],xmm23[12],xmm25[13],xmm23[13],xmm25[14],xmm23[14],xmm25[15],xmm23[15]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm14, %ymm14, %ymm14
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm14, %zmm14, %zmm14
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm14, %zmm14
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm15, %ymm15, %ymm15
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm15, %zmm15, %zmm15
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm15, %zmm15
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm14, %zmm15 {%k1}
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm28[8],xmm19[8],xmm28[9],xmm19[9],xmm28[10],xmm19[10],xmm28[11],xmm19[11],xmm28[12],xmm19[12],xmm28[13],xmm19[13],xmm28[14],xmm19[14],xmm28[15],xmm19[15]
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm19 = xmm26[8],xmm22[8],xmm26[9],xmm22[9],xmm26[10],xmm22[10],xmm26[11],xmm22[11],xmm26[12],xmm22[12],xmm26[13],xmm22[13],xmm26[14],xmm22[14],xmm26[15],xmm22[15]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm19, %xmm22
-; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm23 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm23, %ymm22
-; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm19, %ymm19, %ymm19
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm19, %ymm19
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm19, %zmm22, %zmm19
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm14, %ymm14, %ymm14
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm14, %zmm14, %zmm14
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm14, %zmm14
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm14, %zmm19 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm15, %zmm19 {%k3}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm20, %ymm20
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm21, %zmm20
+; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rsi), %xmm25
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm21 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
+; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rdi), %xmm23
+; AVX512BW-FCP-NEXT:    vpermw %zmm21, %zmm8, %zmm20 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rcx), %xmm26
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm30[8],xmm29[8],xmm30[9],xmm29[9],xmm30[10],xmm29[10],xmm30[11],xmm29[11],xmm30[12],xmm29[12],xmm30[13],xmm29[13],xmm30[14],xmm29[14],xmm30[15],xmm29[15]
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm31[8],xmm0[9],xmm31[9],xmm0[10],xmm31[10],xmm0[11],xmm31[11],xmm0[12],xmm31[12],xmm0[13],xmm31[13],xmm0[14],xmm31[14],xmm0[15],xmm31[15]
+; AVX512BW-FCP-NEXT:    vpermw %zmm0, %zmm13, %zmm21
+; AVX512BW-FCP-NEXT:    vpermw %zmm22, %zmm14, %zmm21 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm23[0],xmm25[0],xmm23[1],xmm25[1],xmm23[2],xmm25[2],xmm23[3],xmm25[3],xmm23[4],xmm25[4],xmm23[5],xmm25[5],xmm23[6],xmm25[6],xmm23[7],xmm25[7]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm22
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm27 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm27, %ymm22
+; AVX512BW-FCP-NEXT:    vmovdqa64 16(%rdx), %xmm27
 ; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512BW-FCP-NEXT:    vpshufb %zmm2, %zmm0, %zmm0
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm1, %ymm1, %ymm1
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm1, %zmm1
-; AVX512BW-FCP-NEXT:    vpshufb %zmm5, %zmm1, %zmm1
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm0, %zmm1 {%k1}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm0, %ymm0
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm22, %zmm22
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm27[0],xmm26[0],xmm27[1],xmm26[1],xmm27[2],xmm26[2],xmm27[3],xmm26[3],xmm27[4],xmm26[4],xmm27[5],xmm26[5],xmm27[6],xmm26[6],xmm27[7],xmm26[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm0, %zmm8, %zmm22 {%k1}
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm23[8],xmm25[8],xmm23[9],xmm25[9],xmm23[10],xmm25[10],xmm23[11],xmm25[11],xmm23[12],xmm25[12],xmm23[13],xmm25[13],xmm23[14],xmm25[14],xmm23[15],xmm25[15]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm23
+; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm25 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm25, %ymm23
 ; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512BW-FCP-NEXT:    vpshufb %zmm9, %zmm0, %zmm0
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm2, %ymm2, %ymm3
-; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm3, %ymm3
-; AVX512BW-FCP-NEXT:    vpshufb %xmm13, %xmm2, %xmm4
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm0, %ymm0
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm23, %zmm0
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm23 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
+; AVX512BW-FCP-NEXT:    vpermw %zmm23, %zmm8, %zmm0 {%k1}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
+; AVX512BW-FCP-NEXT:    # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
+; AVX512BW-FCP-NEXT:    vinserti32x4 $1, %xmm2, %ymm2, %ymm23
+; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm23, %ymm4
+; AVX512BW-FCP-NEXT:    vmovdqa64 16(%r10), %xmm23
+; AVX512BW-FCP-NEXT:    vpshufb %xmm5, %xmm2, %xmm5
 ; AVX512BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm4, %ymm2, %ymm2
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512BW-FCP-NEXT:    vmovdqu16 %zmm0, %zmm2 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm1, %zmm2 {%k3}
+; AVX512BW-FCP-NEXT:    vinserti128 $1, %xmm5, %ymm2, %ymm2
+; AVX512BW-FCP-NEXT:    vmovdqa 16(%rax), %xmm5
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm2
+; AVX512BW-FCP-NEXT:    vmovdqa 16(%r9), %xmm4
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3],xmm7[4],xmm3[4],xmm7[5],xmm3[5],xmm7[6],xmm3[6],xmm7[7],xmm3[7]
+; AVX512BW-FCP-NEXT:    vmovdqa 16(%r8), %xmm7
+; AVX512BW-FCP-NEXT:    vpermw %zmm3, %zmm8, %zmm2 {%k1}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm23[0],xmm5[1],xmm23[1],xmm5[2],xmm23[2],xmm5[3],xmm23[3],xmm5[4],xmm23[4],xmm5[5],xmm23[5],xmm5[6],xmm23[6],xmm5[7],xmm23[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3],xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm8, %zmm13, %zmm8
+; AVX512BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm8 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm5[8],xmm23[8],xmm5[9],xmm23[9],xmm5[10],xmm23[10],xmm5[11],xmm23[11],xmm5[12],xmm23[12],xmm5[13],xmm23[13],xmm5[14],xmm23[14],xmm5[15],xmm23[15]
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm7[8],xmm4[8],xmm7[9],xmm4[9],xmm7[10],xmm4[10],xmm7[11],xmm4[11],xmm7[12],xmm4[12],xmm7[13],xmm4[13],xmm7[14],xmm4[14],xmm7[15],xmm4[15]
+; AVX512BW-FCP-NEXT:    vpermw %zmm4, %zmm13, %zmm4
+; AVX512BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm4 {%k2}
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3],xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm5 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
+; AVX512BW-FCP-NEXT:    vpermw %zmm5, %zmm13, %zmm5
+; AVX512BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm5 {%k2}
+; AVX512BW-FCP-NEXT:    movw $-21846, %ax # imm = 0xAAAA
+; AVX512BW-FCP-NEXT:    kmovd %eax, %k1
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm12, %zmm1 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm16, %zmm15 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm18, %zmm17 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm19 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm21, %zmm20 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm8, %zmm22 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm4, %zmm0 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa32 %zmm5, %zmm2 {%k1}
 ; AVX512BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm2, (%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm19, 192(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm21, 128(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm18, 320(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm20, 256(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm0, 192(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm22, 128(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm20, 320(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm19, 256(%rax)
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm17, 448(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm16, 384(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm8, 64(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm15, 384(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm1, 64(%rax)
 ; AVX512BW-FCP-NEXT:    vzeroupper
 ; AVX512BW-FCP-NEXT:    retq
 ;
@@ -9336,234 +9276,172 @@ define void @store_i8_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP:       # %bb.0:
 ; AVX512DQ-BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512DQ-BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r10), %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 16(%r10), %xmm14
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r10), %xmm18
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r10), %xmm17
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rax), %xmm1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 16(%rax), %xmm15
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rax), %xmm19
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rax), %xmm20
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm2, %ymm2, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm2, %zmm3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,1,0,1,0,1,0,1,8,9,10,11,4,5,2,3,0,1,4,5,0,1,4,5,8,9,10,11,4,5,6,7,0,1,2,3,8,9,8,9,8,9,8,9,12,13,10,11,0,1,2,3,8,9,12,13,8,9,12,13,12,13,14,15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm3, %zmm6
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r9), %xmm3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r9), %xmm21
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r8), %xmm4
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r8), %xmm22
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm5, %ymm5, %ymm5
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm5, %zmm5, %zmm7
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [0,1,2,3,0,1,2,3,8,9,10,11,2,3,6,7,4,5,2,3,4,5,2,3,8,9,10,11,6,7,6,7,0,1,2,3,8,9,10,11,8,9,10,11,10,11,14,15,0,1,2,3,12,13,10,11,12,13,10,11,14,15,14,15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm7, %zmm16
-; AVX512DQ-BW-FCP-NEXT:    movl $-2004318072, %eax # imm = 0x88888888
-; AVX512DQ-BW-FCP-NEXT:    kmovd %eax, %k1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm6, %zmm16 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rcx), %xmm6
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rcx), %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rsi), %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm20
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rsi), %xmm17
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rdi), %xmm21
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rdi), %xmm18
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm2[8],xmm0[8],xmm2[9],xmm0[9],xmm2[10],xmm0[10],xmm2[11],xmm0[11],xmm2[12],xmm0[12],xmm2[13],xmm0[13],xmm2[14],xmm0[14],xmm2[15],xmm0[15]
+; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm3
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxwq {{.*#+}} ymm4 = [2312,2826,3340,3854]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm3, %ymm3
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxwq {{.*#+}} xmm5 = [1284,1798]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm6
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm6, %ymm0, %ymm0
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rcx), %xmm3
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm22
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rcx), %xmm19
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdx), %xmm7
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm7[8],xmm6[8],xmm7[9],xmm6[9],xmm7[10],xmm6[10],xmm7[11],xmm6[11],xmm7[12],xmm6[12],xmm7[13],xmm6[13],xmm7[14],xmm6[14],xmm7[15],xmm6[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm8, %ymm8, %ymm8
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm8, %zmm8, %zmm8
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm9 = [0,1,0,1,4,5,2,3,4,5,2,3,12,13,14,15,0,1,4,5,4,5,6,7,4,5,6,7,12,13,14,15,8,9,8,9,4,5,6,7,12,13,10,11,12,13,10,11,8,9,12,13,4,5,6,7,12,13,14,15,12,13,14,15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm8, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rsi), %xmm10
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rsi), %xmm25
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdi), %xmm11
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rdi), %xmm28
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm8, %ymm8, %ymm13
-; AVX512DQ-BW-FCP-NEXT:    vpmovsxwq {{.*#+}} ymm12 = [2312,2826,3340,3854]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm13, %ymm26
-; AVX512DQ-BW-FCP-NEXT:    vpmovsxwq {{.*#+}} xmm13 = [1284,1798]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm8, %xmm27
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm8 = xmm8[0],zero,zero,zero,xmm8[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm8, %ymm8
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm8, %zmm8
-; AVX512DQ-BW-FCP-NEXT:    movl $572662306, %eax # imm = 0x22222222
-; AVX512DQ-BW-FCP-NEXT:    kmovd %eax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm24, %zmm8 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    movw $-21846, %ax # imm = 0xAAAA
-; AVX512DQ-BW-FCP-NEXT:    kmovd %eax, %k3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm16, %zmm8 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm20[0],xmm17[0],xmm20[1],xmm17[1],xmm20[2],xmm17[2],xmm20[3],xmm17[3],xmm20[4],xmm17[4],xmm20[5],xmm17[5],xmm20[6],xmm17[6],xmm20[7],xmm17[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm16, %ymm16, %ymm16
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm16, %zmm16
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm16, %zmm16
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm22[0],xmm21[0],xmm22[1],xmm21[1],xmm22[2],xmm21[2],xmm22[3],xmm21[3],xmm22[4],xmm21[4],xmm22[5],xmm21[5],xmm22[6],xmm21[6],xmm22[7],xmm21[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm24, %ymm24, %ymm24
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm24, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm24, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm16, %zmm24 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm28[0],xmm25[0],xmm28[1],xmm25[1],xmm28[2],xmm25[2],xmm28[3],xmm25[3],xmm28[4],xmm25[4],xmm28[5],xmm25[5],xmm28[6],xmm25[6],xmm28[7],xmm25[7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm16, %xmm26
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm27 = xmm16[0],zero,zero,zero,xmm16[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm27, %ymm26
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rdx), %xmm30
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm16, %ymm16, %ymm16
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm16, %ymm16
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm26, %zmm16
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm26 = xmm30[0],xmm23[0],xmm30[1],xmm23[1],xmm30[2],xmm23[2],xmm30[3],xmm23[3],xmm30[4],xmm23[4],xmm30[5],xmm23[5],xmm30[6],xmm23[6],xmm30[7],xmm23[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm26, %ymm26
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm26, %zmm26
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm26, %zmm26
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm26, %zmm16 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm26
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm16 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm27
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm20[8],xmm17[8],xmm20[9],xmm17[9],xmm20[10],xmm17[10],xmm20[11],xmm17[11],xmm20[12],xmm17[12],xmm20[13],xmm17[13],xmm20[14],xmm17[14],xmm20[15],xmm17[15]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm24
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm22[8],xmm21[8],xmm22[9],xmm21[9],xmm22[10],xmm21[10],xmm22[11],xmm21[11],xmm22[12],xmm21[12],xmm22[13],xmm21[13],xmm22[14],xmm21[14],xmm22[15],xmm21[15]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm21
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm17
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm17, %zmm17
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm17, %zmm17
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm20, %zmm20
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm20, %zmm20
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm17, %zmm20 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rdi), %xmm29
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm30[8],xmm23[8],xmm30[9],xmm23[9],xmm30[10],xmm23[10],xmm30[11],xmm23[11],xmm30[12],xmm23[12],xmm30[13],xmm23[13],xmm30[14],xmm23[14],xmm30[15],xmm23[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rdx), %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rdx), %xmm24
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm7[8],xmm3[8],xmm7[9],xmm3[9],xmm7[10],xmm3[10],xmm7[11],xmm3[11],xmm7[12],xmm3[12],xmm7[13],xmm3[13],xmm7[14],xmm3[14],xmm7[15],xmm3[15]
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm8 = [0,0,2,1,2,1,6,7,0,2,2,3,2,3,6,7,4,4,2,3,6,5,6,5,4,6,2,3,6,7,6,7]
+; AVX512DQ-BW-FCP-NEXT:    movl $572662306, %r11d # imm = 0x22222222
+; AVX512DQ-BW-FCP-NEXT:    kmovd %r11d, %k1
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm6, %zmm8, %zmm1 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r10), %xmm6
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r10), %xmm25
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rax), %xmm9
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%rax), %xmm26
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm15 = xmm9[8],xmm6[8],xmm9[9],xmm6[9],xmm9[10],xmm6[10],xmm9[11],xmm6[11],xmm9[12],xmm6[12],xmm9[13],xmm6[13],xmm9[14],xmm6[14],xmm9[15],xmm6[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r9), %xmm10
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r9), %xmm27
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%r8), %xmm11
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 48(%r8), %xmm28
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm12 = xmm11[8],xmm10[8],xmm11[9],xmm10[9],xmm11[10],xmm10[10],xmm11[11],xmm10[11],xmm11[12],xmm10[12],xmm11[13],xmm10[13],xmm11[14],xmm10[14],xmm11[15],xmm10[15]
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm13 = [0,1,0,1,4,5,1,3,2,1,2,1,4,5,3,3,0,1,4,5,4,5,5,7,0,1,6,5,6,5,7,7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm12, %zmm13, %zmm12
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm14 = [0,0,0,0,4,5,2,1,0,2,0,2,4,5,2,3,0,1,4,4,4,4,6,5,0,1,4,6,4,6,6,7]
+; AVX512DQ-BW-FCP-NEXT:    movl $-2004318072, %r11d # imm = 0x88888888
+; AVX512DQ-BW-FCP-NEXT:    kmovd %r11d, %k2
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm14, %zmm12 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm18[0],xmm17[0],xmm18[1],xmm17[1],xmm18[2],xmm17[2],xmm18[3],xmm17[3],xmm18[4],xmm17[4],xmm18[5],xmm17[5],xmm18[6],xmm17[6],xmm18[7],xmm17[7]
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm15, %ymm15, %ymm16
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm16, %ymm16
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm15, %xmm29
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm15 = xmm15[0],zero,zero,zero,xmm15[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm29, %ymm15, %ymm15
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm16, %zmm15, %zmm15
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm24[0],xmm19[0],xmm24[1],xmm19[1],xmm24[2],xmm19[2],xmm24[3],xmm19[3],xmm24[4],xmm19[4],xmm24[5],xmm19[5],xmm24[6],xmm19[6],xmm24[7],xmm19[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm8, %zmm15 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm29 = xmm26[0],xmm25[0],xmm26[1],xmm25[1],xmm26[2],xmm25[2],xmm26[3],xmm25[3],xmm26[4],xmm25[4],xmm26[5],xmm25[5],xmm26[6],xmm25[6],xmm26[7],xmm25[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm16 = xmm28[0],xmm27[0],xmm28[1],xmm27[1],xmm28[2],xmm27[2],xmm28[3],xmm27[3],xmm28[4],xmm27[4],xmm28[5],xmm27[5],xmm28[6],xmm27[6],xmm28[7],xmm27[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm13, %zmm16
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm29, %zmm14, %zmm16 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm18[8],xmm17[8],xmm18[9],xmm17[9],xmm18[10],xmm17[10],xmm18[11],xmm17[11],xmm18[12],xmm17[12],xmm18[13],xmm17[13],xmm18[14],xmm17[14],xmm18[15],xmm17[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm17, %xmm18
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm29 = xmm17[0],zero,zero,zero,xmm17[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm29, %ymm18
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r10), %xmm29
 ; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm17
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm17, %zmm17
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm17, %zmm22
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm17 = xmm28[8],xmm25[8],xmm28[9],xmm25[9],xmm28[10],xmm25[10],xmm28[11],xmm25[11],xmm28[12],xmm25[12],xmm28[13],xmm25[13],xmm28[14],xmm25[14],xmm28[15],xmm25[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm17, %ymm17, %ymm23
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm23, %ymm23
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm17, %xmm25
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm17 = xmm17[0],zero,zero,zero,xmm17[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm25, %ymm17, %ymm17
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm23, %zmm17, %zmm17
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm22, %zmm17 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm20, %zmm17 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm20 = xmm19[0],xmm18[0],xmm19[1],xmm18[1],xmm19[2],xmm18[2],xmm19[3],xmm18[3],xmm19[4],xmm18[4],xmm19[5],xmm18[5],xmm19[6],xmm18[6],xmm19[7],xmm18[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm20, %zmm20
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm20, %zmm20
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm22 = xmm27[0],xmm26[0],xmm27[1],xmm26[1],xmm27[2],xmm26[2],xmm27[3],xmm26[3],xmm27[4],xmm26[4],xmm27[5],xmm26[5],xmm27[6],xmm26[6],xmm27[7],xmm26[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm22, %ymm22
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm22, %zmm22, %zmm22
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm22, %zmm22
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm20, %zmm22 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm20 = xmm29[0],xmm21[0],xmm29[1],xmm21[1],xmm29[2],xmm21[2],xmm29[3],xmm21[3],xmm29[4],xmm21[4],xmm29[5],xmm21[5],xmm29[6],xmm21[6],xmm29[7],xmm21[7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm20, %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm17, %ymm17
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm17
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rax), %xmm30
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm24[8],xmm19[8],xmm24[9],xmm19[9],xmm24[10],xmm19[10],xmm24[11],xmm19[11],xmm24[12],xmm19[12],xmm24[13],xmm19[13],xmm24[14],xmm19[14],xmm24[15],xmm19[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm31
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm18, %zmm8, %zmm17 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%r8), %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm19 = xmm26[8],xmm25[8],xmm26[9],xmm25[9],xmm26[10],xmm25[10],xmm26[11],xmm25[11],xmm26[12],xmm25[12],xmm26[13],xmm25[13],xmm26[14],xmm25[14],xmm26[15],xmm25[15]
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm28[8],xmm27[8],xmm28[9],xmm27[9],xmm28[10],xmm27[10],xmm28[11],xmm27[11],xmm28[12],xmm27[12],xmm28[13],xmm27[13],xmm28[14],xmm27[14],xmm28[15],xmm27[15]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm18, %zmm13, %zmm18
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm19, %zmm14, %zmm18 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm21[0],xmm20[0],xmm21[1],xmm20[1],xmm21[2],xmm20[2],xmm21[3],xmm20[3],xmm21[4],xmm20[4],xmm21[5],xmm20[5],xmm21[6],xmm20[6],xmm21[7],xmm20[7]
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm19, %ymm19, %ymm24
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm24, %ymm24
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm19, %xmm25
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm19 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm25, %ymm19, %ymm19
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm19, %zmm19
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm23[0],xmm22[0],xmm23[1],xmm22[1],xmm23[2],xmm22[2],xmm23[3],xmm22[3],xmm23[4],xmm22[4],xmm23[5],xmm22[5],xmm23[6],xmm22[6],xmm23[7],xmm22[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm24, %zmm8, %zmm19 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm25 = xmm30[0],xmm29[0],xmm30[1],xmm29[1],xmm30[2],xmm29[2],xmm30[3],xmm29[3],xmm30[4],xmm29[4],xmm30[5],xmm29[5],xmm30[6],xmm29[6],xmm30[7],xmm29[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm0[0],xmm31[0],xmm0[1],xmm31[1],xmm0[2],xmm31[2],xmm0[3],xmm31[3],xmm0[4],xmm31[4],xmm0[5],xmm31[5],xmm0[6],xmm31[6],xmm0[7],xmm31[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm24, %zmm13, %zmm24
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm25, %zmm14, %zmm24 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm21[8],xmm20[8],xmm21[9],xmm20[9],xmm21[10],xmm20[10],xmm21[11],xmm20[11],xmm21[12],xmm20[12],xmm21[13],xmm20[13],xmm21[14],xmm20[14],xmm21[15],xmm20[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm20, %xmm21
 ; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm25 = xmm20[0],zero,zero,zero,xmm20[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm25, %ymm23
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rdx), %xmm28
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm25, %ymm21
 ; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm20, %ymm20, %ymm20
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm20, %ymm20
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm23, %zmm20
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm23 = xmm28[0],xmm24[0],xmm28[1],xmm24[1],xmm28[2],xmm24[2],xmm28[3],xmm24[3],xmm28[4],xmm24[4],xmm28[5],xmm24[5],xmm28[6],xmm24[6],xmm28[7],xmm24[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm23, %ymm23
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm23, %zmm23, %zmm23
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm23, %zmm23
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm23, %zmm20 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%r9), %xmm23
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm22, %zmm20 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%r8), %xmm25
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm19[8],xmm18[8],xmm19[9],xmm18[9],xmm19[10],xmm18[10],xmm19[11],xmm18[11],xmm19[12],xmm18[12],xmm19[13],xmm18[13],xmm19[14],xmm18[14],xmm19[15],xmm18[15]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rcx), %xmm19
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm26 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rsi), %xmm22
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm18
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm18, %zmm18, %zmm18
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm18, %zmm18
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm26, %ymm26, %ymm26
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm26, %zmm26, %zmm26
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm26, %zmm27
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm18, %zmm27 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rdi), %xmm26
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm28[8],xmm24[8],xmm28[9],xmm24[9],xmm28[10],xmm24[10],xmm28[11],xmm24[11],xmm28[12],xmm24[12],xmm28[13],xmm24[13],xmm28[14],xmm24[14],xmm28[15],xmm24[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm18
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm18, %zmm18, %zmm18
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm18, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm18 = xmm29[8],xmm21[8],xmm29[9],xmm21[9],xmm29[10],xmm21[10],xmm29[11],xmm21[11],xmm29[12],xmm21[12],xmm29[13],xmm21[13],xmm29[14],xmm21[14],xmm29[15],xmm21[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm18, %ymm18, %ymm21
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm21, %ymm21
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm18, %xmm28
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm18 = xmm18[0],zero,zero,zero,xmm18[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm28, %ymm18, %ymm18
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm18, %zmm18
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm24, %zmm18 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm27, %zmm18 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm21 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm21, %ymm21
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm21, %zmm21
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm21, %zmm21
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm24 = xmm25[0],xmm23[0],xmm25[1],xmm23[1],xmm25[2],xmm23[2],xmm25[3],xmm23[3],xmm25[4],xmm23[4],xmm25[5],xmm23[5],xmm25[6],xmm23[6],xmm25[7],xmm23[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm24, %ymm24, %ymm24
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm24, %zmm24, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm24, %zmm24
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm21, %zmm24 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm21 = xmm26[0],xmm22[0],xmm26[1],xmm22[1],xmm26[2],xmm22[2],xmm26[3],xmm22[3],xmm26[4],xmm22[4],xmm26[5],xmm22[5],xmm26[6],xmm22[6],xmm26[7],xmm22[7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm21, %xmm27
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm28 = xmm21[0],zero,zero,zero,xmm21[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm28, %ymm27
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rdx), %xmm28
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm21, %ymm21, %ymm21
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm21, %ymm21
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm21, %zmm27, %zmm21
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm27 = xmm28[0],xmm19[0],xmm28[1],xmm19[1],xmm28[2],xmm19[2],xmm28[3],xmm19[3],xmm28[4],xmm19[4],xmm28[5],xmm19[5],xmm28[6],xmm19[6],xmm28[7],xmm19[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm27, %ymm27, %ymm27
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm27, %zmm27, %zmm27
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm27, %zmm27
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm27, %zmm21 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm21 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm15 = xmm25[8],xmm23[8],xmm25[9],xmm23[9],xmm25[10],xmm23[10],xmm25[11],xmm23[11],xmm25[12],xmm23[12],xmm25[13],xmm23[13],xmm25[14],xmm23[14],xmm25[15],xmm23[15]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm14, %ymm14, %ymm14
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm14, %zmm14, %zmm14
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm14, %zmm14
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm15, %ymm15, %ymm15
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm15, %zmm15, %zmm15
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm15, %zmm15
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm14, %zmm15 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm28[8],xmm19[8],xmm28[9],xmm19[9],xmm28[10],xmm19[10],xmm28[11],xmm19[11],xmm28[12],xmm19[12],xmm28[13],xmm19[13],xmm28[14],xmm19[14],xmm28[15],xmm19[15]
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm19 = xmm26[8],xmm22[8],xmm26[9],xmm22[9],xmm26[10],xmm22[10],xmm26[11],xmm22[11],xmm26[12],xmm22[12],xmm26[13],xmm22[13],xmm26[14],xmm22[14],xmm26[15],xmm22[15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm19, %xmm22
-; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm23 = xmm19[0],zero,zero,zero,xmm19[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm23, %ymm22
-; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm19, %ymm19, %ymm19
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm19, %ymm19
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm19, %zmm22, %zmm19
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm14, %ymm14, %ymm14
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm14, %zmm14, %zmm14
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm14, %zmm14
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm14, %zmm19 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm15, %zmm19 {%k3}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3],xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm20, %ymm20
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm20, %zmm21, %zmm20
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rsi), %xmm25
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm21 = xmm23[8],xmm22[8],xmm23[9],xmm22[9],xmm23[10],xmm22[10],xmm23[11],xmm22[11],xmm23[12],xmm22[12],xmm23[13],xmm22[13],xmm23[14],xmm22[14],xmm23[15],xmm22[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rdi), %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm21, %zmm8, %zmm20 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rcx), %xmm26
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm30[8],xmm29[8],xmm30[9],xmm29[9],xmm30[10],xmm29[10],xmm30[11],xmm29[11],xmm30[12],xmm29[12],xmm30[13],xmm29[13],xmm30[14],xmm29[14],xmm30[15],xmm29[15]
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm31[8],xmm0[9],xmm31[9],xmm0[10],xmm31[10],xmm0[11],xmm31[11],xmm0[12],xmm31[12],xmm0[13],xmm31[13],xmm0[14],xmm31[14],xmm0[15],xmm31[15]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm0, %zmm13, %zmm21
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm22, %zmm14, %zmm21 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm23[0],xmm25[0],xmm23[1],xmm25[1],xmm23[2],xmm25[2],xmm23[3],xmm25[3],xmm23[4],xmm25[4],xmm23[5],xmm25[5],xmm23[6],xmm25[6],xmm23[7],xmm25[7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm22
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm27 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm22, %ymm27, %ymm22
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%rdx), %xmm27
 ; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm2, %zmm0, %zmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm4[0],xmm3[0],xmm4[1],xmm3[1],xmm4[2],xmm3[2],xmm4[3],xmm3[3],xmm4[4],xmm3[4],xmm4[5],xmm3[5],xmm4[6],xmm3[6],xmm4[7],xmm3[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm1, %ymm1, %ymm1
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm1, %zmm1
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm5, %zmm1, %zmm1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm0, %zmm1 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm7[0],xmm6[0],xmm7[1],xmm6[1],xmm7[2],xmm6[2],xmm7[3],xmm6[3],xmm7[4],xmm6[4],xmm7[5],xmm6[5],xmm7[6],xmm6[6],xmm7[7],xmm6[7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm0, %ymm0
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm22, %zmm22
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm27[0],xmm26[0],xmm27[1],xmm26[1],xmm27[2],xmm26[2],xmm27[3],xmm26[3],xmm27[4],xmm26[4],xmm27[5],xmm26[5],xmm27[6],xmm26[6],xmm27[7],xmm26[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm0, %zmm8, %zmm22 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm23[8],xmm25[8],xmm23[9],xmm25[9],xmm23[10],xmm25[10],xmm23[11],xmm25[11],xmm23[12],xmm25[12],xmm23[13],xmm25[13],xmm23[14],xmm25[14],xmm23[15],xmm25[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm0, %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm25 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm23, %ymm25, %ymm23
 ; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %zmm9, %zmm0, %zmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm2, %ymm2, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm3, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm13, %xmm2, %xmm4
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm0, %ymm0
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm23, %zmm0
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm23 = xmm27[8],xmm26[8],xmm27[9],xmm26[9],xmm27[10],xmm26[10],xmm27[11],xmm26[11],xmm27[12],xmm26[12],xmm27[13],xmm26[13],xmm27[14],xmm26[14],xmm27[15],xmm26[15]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm23, %zmm8, %zmm0 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
+; AVX512DQ-BW-FCP-NEXT:    # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3],xmm2[4],mem[4],xmm2[5],mem[5],xmm2[6],mem[6],xmm2[7],mem[7]
+; AVX512DQ-BW-FCP-NEXT:    vinserti32x4 $1, %xmm2, %ymm2, %ymm23
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm23, %ymm4
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 16(%r10), %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm5, %xmm2, %xmm5
 ; AVX512DQ-BW-FCP-NEXT:    vpmovzxwq {{.*#+}} xmm2 = xmm2[0],zero,zero,zero,xmm2[1],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm4, %ymm2, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu16 %zmm0, %zmm2 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm1, %zmm2 {%k3}
+; AVX512DQ-BW-FCP-NEXT:    vinserti128 $1, %xmm5, %ymm2, %ymm2
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 16(%rax), %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm2
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 16(%r9), %xmm4
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm7[0],xmm3[0],xmm7[1],xmm3[1],xmm7[2],xmm3[2],xmm7[3],xmm3[3],xmm7[4],xmm3[4],xmm7[5],xmm3[5],xmm7[6],xmm3[6],xmm7[7],xmm3[7]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 16(%r8), %xmm7
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm3, %zmm8, %zmm2 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm5[0],xmm23[0],xmm5[1],xmm23[1],xmm5[2],xmm23[2],xmm5[3],xmm23[3],xmm5[4],xmm23[4],xmm5[5],xmm23[5],xmm5[6],xmm23[6],xmm5[7],xmm23[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm7[0],xmm4[0],xmm7[1],xmm4[1],xmm7[2],xmm4[2],xmm7[3],xmm4[3],xmm7[4],xmm4[4],xmm7[5],xmm4[5],xmm7[6],xmm4[6],xmm7[7],xmm4[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm8, %zmm13, %zmm8
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm8 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm5[8],xmm23[8],xmm5[9],xmm23[9],xmm5[10],xmm23[10],xmm5[11],xmm23[11],xmm5[12],xmm23[12],xmm5[13],xmm23[13],xmm5[14],xmm23[14],xmm5[15],xmm23[15]
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm7[8],xmm4[8],xmm7[9],xmm4[9],xmm7[10],xmm4[10],xmm7[11],xmm4[11],xmm7[12],xmm4[12],xmm7[13],xmm4[13],xmm7[14],xmm4[14],xmm7[15],xmm4[15]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm4, %zmm13, %zmm4
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm4 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm9[0],xmm6[0],xmm9[1],xmm6[1],xmm9[2],xmm6[2],xmm9[3],xmm6[3],xmm9[4],xmm6[4],xmm9[5],xmm6[5],xmm9[6],xmm6[6],xmm9[7],xmm6[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm5 = xmm11[0],xmm10[0],xmm11[1],xmm10[1],xmm11[2],xmm10[2],xmm11[3],xmm10[3],xmm11[4],xmm10[4],xmm11[5],xmm10[5],xmm11[6],xmm10[6],xmm11[7],xmm10[7]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm5, %zmm13, %zmm5
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm3, %zmm14, %zmm5 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    movw $-21846, %ax # imm = 0xAAAA
+; AVX512DQ-BW-FCP-NEXT:    kmovd %eax, %k1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm12, %zmm1 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm16, %zmm15 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm18, %zmm17 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm24, %zmm19 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm21, %zmm20 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm8, %zmm22 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm4, %zmm0 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa32 %zmm5, %zmm2 {%k1}
 ; AVX512DQ-BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm2, (%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm19, 192(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm21, 128(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm18, 320(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm20, 256(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm0, 192(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm22, 128(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm20, 320(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm19, 256(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm17, 448(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm16, 384(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm8, 64(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm15, 384(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm1, 64(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vzeroupper
 ; AVX512DQ-BW-FCP-NEXT:    retq
  %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64