[llvm] [X86] combineINSERT_SUBVECTOR - directly fold to X86ISD::SUBV_BROADCAST_LOAD to prevent vector split infinite loop (PR #145077)

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Fri Jun 20 10:23:00 PDT 2025


https://github.com/RKSimon created https://github.com/llvm/llvm-project/pull/145077

This reverts #140919 / f1d03dedfbe87119cfcafb07e0e0f90ec291cb97, which could result in another fold splitting the concatenation apart again before it was recognized as a SUBV_BROADCAST_LOAD. Instead, fold the insertion directly to X86ISD::SUBV_BROADCAST_LOAD so no intermediate concatenation is ever exposed.
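For context, the hazard is an ordering race between two DAG combines: the old code lowered the splat to a plain concatenation (via concatSubVectors) and relied on a later combine to recognize it as a subvector broadcast load, but a vector-split combine could tear the concatenation apart first, recreating the original insert_subvector and cycling forever. Below is a minimal standalone sketch of that failure mode; the enum values and rule functions are hypothetical stand-ins for the nodes involved, not the LLVM SelectionDAG API:

#include <cstdio>

// Hypothetical node kinds standing in for the DAG nodes involved.
enum Kind { InsertSubvector, Concat, SubvBroadcastLoad };

// Old rule: fold the insert into a plain concat and hope a later combine
// turns it into a subvector broadcast load.
Kind combineInsertOld(Kind) { return Concat; }

// Competing rule: a split combine that tears the concat back apart,
// recreating the original insert_subvector pattern.
Kind splitConcat(Kind) { return InsertSubvector; }

// Fixed rule: emit the broadcast node directly, leaving no intermediate
// concat for the split combine to undo.
Kind combineInsertNew(Kind) { return SubvBroadcastLoad; }

int main() {
  // With the old rule the two combines alternate forever; cap at 8 steps.
  Kind k = InsertSubvector;
  int steps = 0;
  while (k != SubvBroadcastLoad && steps < 8) {
    k = (k == InsertSubvector) ? combineInsertOld(k) : splitConcat(k);
    ++steps;
  }
  std::printf("old rule: %s after %d steps\n",
              k == SubvBroadcastLoad ? "converged" : "still looping", steps);

  // The fixed rule reaches the terminal node in one step.
  k = combineInsertNew(InsertSubvector);
  std::printf("new rule: %s\n",
              k == SubvBroadcastLoad ? "converged" : "still looping");
  return 0;
}

The patch below follows the same shape: combineINSERT_SUBVECTOR now builds the X86ISD::SUBV_BROADCAST_LOAD node itself and uses DCI.CombineTo to replace the subvector load, so the split combine never sees an intermediate concatenation.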

From f4002b1bc0b614f0a0b0189d419ee47e05c6f680 Mon Sep 17 00:00:00 2001
From: Simon Pilgrim <llvm-dev at redking.me.uk>
Date: Fri, 20 Jun 2025 18:22:11 +0100
Subject: [PATCH] [X86] combineINSERT_SUBVECTOR - directly fold to
 X86ISD::SUBV_BROADCAST_LOAD to prevent vector split infinite loop

This reverts #140919 / f1d03dedfbe87119cfcafb07e0e0f90ec291cb97, which could result in another fold splitting the concatenation apart again before it was recognized as a SUBV_BROADCAST_LOAD. Instead, fold the insertion directly to X86ISD::SUBV_BROADCAST_LOAD so no intermediate concatenation is ever exposed.
---
 llvm/lib/Target/X86/X86ISelLowering.cpp       |  10 +-
 llvm/test/CodeGen/X86/oddshuffles.ll          |  47 ++
 .../vector-interleaved-store-i8-stride-7.ll   | 556 +++++++++---------
 3 files changed, 332 insertions(+), 281 deletions(-)

diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
index 35d7b80844056..33083c0eba695 100644
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -59465,8 +59465,7 @@ static SDValue combineINSERT_SUBVECTOR(SDNode *N, SelectionDAG &DAG,
   }
 
   // If we're splatting the lower half subvector of a full vector load into the
-  // upper half, just splat the subvector directly, potentially creating a
-  // subvector broadcast.
+  // upper half, attempt to create a subvector broadcast.
   if ((int)IdxVal == (VecNumElts / 2) &&
       Vec.getValueSizeInBits() == (2 * SubVec.getValueSizeInBits())) {
     auto *VecLd = dyn_cast<LoadSDNode>(Vec);
@@ -59474,7 +59473,12 @@ static SDValue combineINSERT_SUBVECTOR(SDNode *N, SelectionDAG &DAG,
     if (VecLd && SubLd &&
         DAG.areNonVolatileConsecutiveLoads(
             SubLd, VecLd, SubVec.getValueSizeInBits() / 8, 0)) {
-      return concatSubVectors(SubVec, SubVec, DAG, dl);
+      SDValue BcastLd = getBROADCAST_LOAD(X86ISD::SUBV_BROADCAST_LOAD, dl, OpVT,
+                                          SubVecVT, SubLd, 0, DAG);
+      SDValue NewSubVec = DAG.getNode(ISD::EXTRACT_SUBVECTOR, dl, SubVecVT,
+                                      BcastLd, DAG.getVectorIdxConstant(0, dl));
+      DCI.CombineTo(SubLd, NewSubVec, BcastLd.getValue(1));
+      return BcastLd;
     }
   }
 
diff --git a/llvm/test/CodeGen/X86/oddshuffles.ll b/llvm/test/CodeGen/X86/oddshuffles.ll
index 6b9a86343ea10..4b0f75df83a76 100644
--- a/llvm/test/CodeGen/X86/oddshuffles.ll
+++ b/llvm/test/CodeGen/X86/oddshuffles.ll
@@ -2508,3 +2508,50 @@ define void @D107009(ptr %input, ptr %output) {
   store <64 x i32> %i7, ptr %output, align 16
   ret void
 }
+
+; Ensure the concatenation of repeated subvector loads is folded before the vector can be split apart.
+define void @split_v2i64_subvector_broadcast(ptr readonly align 8 captures(none) dereferenceable(64) %arg) {
+; SSE-LABEL: split_v2i64_subvector_broadcast:
+; SSE:       # %bb.0:
+; SSE-NEXT:    movups 8(%rdi), %xmm0
+; SSE-NEXT:    movups 40(%rdi), %xmm1
+; SSE-NEXT:    movaps %xmm0, %xmm2
+; SSE-NEXT:    movlhps {{.*#+}} xmm2 = xmm2[0],xmm1[0]
+; SSE-NEXT:    unpckhpd {{.*#+}} xmm0 = xmm0[1],xmm1[1]
+; SSE-NEXT:    movups %xmm0, (%rax)
+; SSE-NEXT:    movups %xmm2, (%rax)
+; SSE-NEXT:    retq
+;
+; AVX1-LABEL: split_v2i64_subvector_broadcast:
+; AVX1:       # %bb.0:
+; AVX1-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; AVX1-NEXT:    vbroadcastf128 {{.*#+}} ymm1 = mem[0,1,0,1]
+; AVX1-NEXT:    vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[3],ymm0[3]
+; AVX1-NEXT:    vmovupd %ymm0, (%rax)
+; AVX1-NEXT:    vzeroupper
+; AVX1-NEXT:    retq
+;
+; AVX2-LABEL: split_v2i64_subvector_broadcast:
+; AVX2:       # %bb.0:
+; AVX2-NEXT:    vmovups 40(%rdi), %xmm0
+; AVX2-NEXT:    vpermpd {{.*#+}} ymm0 = ymm0[0,0,2,1]
+; AVX2-NEXT:    vpermpd {{.*#+}} ymm1 = mem[0,1,1,3]
+; AVX2-NEXT:    vblendps {{.*#+}} ymm0 = ymm1[0,1],ymm0[2,3],ymm1[4,5],ymm0[6,7]
+; AVX2-NEXT:    vmovups %ymm0, (%rax)
+; AVX2-NEXT:    vzeroupper
+; AVX2-NEXT:    retq
+;
+; XOP-LABEL: split_v2i64_subvector_broadcast:
+; XOP:       # %bb.0:
+; XOP-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; XOP-NEXT:    vbroadcastf128 {{.*#+}} ymm1 = mem[0,1,0,1]
+; XOP-NEXT:    vshufpd {{.*#+}} ymm0 = ymm1[0],ymm0[0],ymm1[3],ymm0[3]
+; XOP-NEXT:    vmovupd %ymm0, (%rax)
+; XOP-NEXT:    vzeroupper
+; XOP-NEXT:    retq
+  %gep = getelementptr inbounds nuw i8, ptr %arg, i64 8
+  %load = load <6 x i64>, ptr %gep, align 8
+  %shuffle = shufflevector <6 x i64> %load, <6 x i64> poison, <4 x i32> <i32 0, i32 4, i32 1, i32 5>
+  store <4 x i64> %shuffle, ptr poison, align 8
+  ret void
+}
diff --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
index 25e489eef9d11..61bfee133d84e 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
@@ -10105,14 +10105,14 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP-LABEL: store_i8_stride7_vf64:
 ; AVX512BW-FCP:       # %bb.0:
 ; AVX512BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-FCP-NEXT:    vmovdqa64 (%rax), %zmm16
+; AVX512BW-FCP-NEXT:    vmovdqa64 (%rax), %zmm15
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%rdx), %ymm0
-; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm13 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm13, %ymm0, %ymm1
+; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm12 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm0, %ymm1
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm14 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm14, %ymm2, %ymm3
-; AVX512BW-FCP-NEXT:    vpor %ymm1, %ymm3, %ymm1
+; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm13 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm13, %ymm2, %ymm4
+; AVX512BW-FCP-NEXT:    vpor %ymm1, %ymm4, %ymm1
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
 ; AVX512BW-FCP-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128]
@@ -10122,105 +10122,106 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%rdi), %ymm1
 ; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm2 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm2, %ymm1, %ymm3
-; AVX512BW-FCP-NEXT:    vmovdqa 32(%rsi), %ymm5
-; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm9 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm9, %ymm5, %ymm6
-; AVX512BW-FCP-NEXT:    vpor %ymm3, %ymm6, %ymm3
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm1, %ymm1
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm5, %ymm5
-; AVX512BW-FCP-NEXT:    vpor %ymm1, %ymm5, %ymm1
+; AVX512BW-FCP-NEXT:    vpshufb %ymm2, %ymm1, %ymm4
+; AVX512BW-FCP-NEXT:    vmovdqa 32(%rsi), %ymm6
+; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm8 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm8, %ymm6, %ymm5
+; AVX512BW-FCP-NEXT:    vpor %ymm4, %ymm5, %ymm4
+; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm3, %ymm1, %ymm1
+; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm3, %ymm6, %ymm6
+; AVX512BW-FCP-NEXT:    vpor %ymm1, %ymm6, %ymm1
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm3, %zmm5
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm4, %zmm4
 ; AVX512BW-FCP-NEXT:    movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
 ; AVX512BW-FCP-NEXT:    kmovq %r10, %k1
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm5 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa 32(%r9), %ymm3
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm4 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa 32(%r9), %ymm9
 ; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm0 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm0, %ymm3, %ymm6
+; AVX512BW-FCP-NEXT:    vpshufb %ymm0, %ymm9, %ymm6
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%r8), %ymm7
 ; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm1, %ymm7, %ymm8
-; AVX512BW-FCP-NEXT:    vpor %ymm6, %ymm8, %ymm8
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm7, %ymm10
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm4, %ymm3, %ymm3
-; AVX512BW-FCP-NEXT:    vpor %ymm3, %ymm10, %ymm3
-; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm8, %zmm3
+; AVX512BW-FCP-NEXT:    vpshufb %ymm1, %ymm7, %ymm10
+; AVX512BW-FCP-NEXT:    vpor %ymm6, %ymm10, %ymm10
+; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm3, %ymm7, %ymm11
+; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm3, %ymm9, %ymm9
+; AVX512BW-FCP-NEXT:    vpor %ymm11, %ymm9, %ymm9
+; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%rax), %ymm10
-; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm4 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12]
-; AVX512BW-FCP-NEXT:    vpermw %ymm10, %ymm4, %ymm11
+; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} ymm11 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10]
+; AVX512BW-FCP-NEXT:    vpermw %ymm10, %ymm11, %ymm11
 ; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
 ; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm10
 ; AVX512BW-FCP-NEXT:    movabsq $145249953336295682, %rax # imm = 0x204081020408102
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm10, %zmm3 {%k2}
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm10, %zmm9 {%k2}
 ; AVX512BW-FCP-NEXT:    movabsq $-4357498600088870461, %rax # imm = 0xC3870E1C3870E1C3
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm3, %zmm5 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa (%rdi), %ymm10
-; AVX512BW-FCP-NEXT:    vpshufb %ymm2, %ymm10, %ymm2
-; AVX512BW-FCP-NEXT:    vmovdqa (%rsi), %ymm11
-; AVX512BW-FCP-NEXT:    vpshufb %ymm9, %ymm11, %ymm3
-; AVX512BW-FCP-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512BW-FCP-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512BW-FCP-NEXT:    vmovdqa 32(%rdi), %xmm8
-; AVX512BW-FCP-NEXT:    vmovdqa64 (%rsi), %xmm18
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm9 = xmm18[8],xmm3[8],xmm18[9],xmm3[9],xmm18[10],xmm3[10],xmm18[11],xmm3[11],xmm18[12],xmm3[12],xmm18[13],xmm3[13],xmm18[14],xmm3[14],xmm18[15],xmm3[15]
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm9, %zmm4 {%k2}
+; AVX512BW-FCP-NEXT:    vmovdqa (%rdi), %ymm9
+; AVX512BW-FCP-NEXT:    vpshufb %ymm2, %ymm9, %ymm2
+; AVX512BW-FCP-NEXT:    vmovdqa (%rsi), %ymm10
+; AVX512BW-FCP-NEXT:    vpshufb %ymm8, %ymm10, %ymm8
+; AVX512BW-FCP-NEXT:    vpor %ymm2, %ymm8, %ymm2
+; AVX512BW-FCP-NEXT:    vmovdqa64 (%rdi), %xmm18
+; AVX512BW-FCP-NEXT:    vmovdqa 32(%rdi), %xmm7
+; AVX512BW-FCP-NEXT:    vmovdqa64 (%rsi), %xmm23
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm16
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm23[8],xmm18[8],xmm23[9],xmm18[9],xmm23[10],xmm18[10],xmm23[11],xmm18[11],xmm23[12],xmm18[12],xmm23[13],xmm18[13],xmm23[14],xmm18[14],xmm23[15],xmm18[15]
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm17 = [2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm17, %xmm9, %xmm9
-; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm9, %zmm9
+; AVX512BW-FCP-NEXT:    vpshufb %xmm17, %xmm8, %xmm8
+; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm8, %zmm8
+; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm12, %ymm11, %ymm2
 ; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm13, %ymm12, %ymm2
-; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm14, %ymm13, %ymm14
-; AVX512BW-FCP-NEXT:    vpor %ymm2, %ymm14, %ymm2
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm13, %ymm12, %ymm13
+; AVX512BW-FCP-NEXT:    vpor %ymm2, %ymm13, %ymm2
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm13 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm19 = [6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm19, %xmm14, %xmm14
-; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[0,1,0,1]
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm14, %zmm2
+; AVX512BW-FCP-NEXT:    vpshufb %xmm19, %xmm13, %xmm13
+; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm13, %zmm2
 ; AVX512BW-FCP-NEXT:    movabsq $435749860008887046, %rax # imm = 0x60C183060C18306
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm2, %zmm9 {%k2}
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm2, %zmm8 {%k2}
+; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-FCP-NEXT:    vpshufb %ymm0, %ymm13, %ymm0
 ; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm14 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm0, %ymm14, %ymm0
-; AVX512BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm15 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-FCP-NEXT:    vpshufb %ymm1, %ymm15, %ymm1
+; AVX512BW-FCP-NEXT:    vpshufb %ymm1, %ymm14, %ymm1
 ; AVX512BW-FCP-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm20 = [u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm20, %xmm1, %xmm1
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6,6,1,6,0,7,1,0,7,14,9,14,8,15,9,8,15]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512BW-FCP-NEXT:    movabsq $2323999253380730912, %rax # imm = 0x2040810204081020
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k2}
 ; AVX512BW-FCP-NEXT:    movabsq $4066998693416279096, %rax # imm = 0x3870E1C3870E1C38
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm9 {%k2}
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm8 {%k2}
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm21 = [u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm21, %xmm18, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb %xmm21, %xmm23, %xmm0
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm22 = [u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm22, %xmm3, %xmm1
+; AVX512BW-FCP-NEXT:    vpshufb %xmm22, %xmm18, %xmm1
 ; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm18[0],xmm3[1],xmm18[1],xmm3[2],xmm18[2],xmm3[3],xmm18[3],xmm3[4],xmm18[4],xmm3[5],xmm18[5],xmm3[6],xmm18[6],xmm3[7],xmm18[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm18[0],xmm23[0],xmm18[1],xmm23[1],xmm18[2],xmm23[2],xmm18[3],xmm23[3],xmm18[4],xmm23[4],xmm18[5],xmm23[5],xmm18[6],xmm23[6],xmm18[7],xmm23[7]
 ; AVX512BW-FCP-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm18 = zmm1[0,1,0,1],zmm0[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm26 = [u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm26, %xmm13, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb %xmm26, %xmm12, %xmm0
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm27 = [u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm27, %xmm12, %xmm1
+; AVX512BW-FCP-NEXT:    vpshufb %xmm27, %xmm11, %xmm1
 ; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3],xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm23 = [4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm23, %xmm1, %xmm1
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
@@ -10228,48 +10229,47 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm18 {%k2}
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm24 = [128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm24, %xmm14, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb %xmm24, %xmm13, %xmm0
 ; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm25 = [4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm25, %xmm15, %xmm1
+; AVX512BW-FCP-NEXT:    vpshufb %xmm25, %xmm14, %xmm1
 ; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm29 = [u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm29, %xmm1, %xmm1
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm28 = [u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm28, %xmm1, %xmm1
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512BW-FCP-NEXT:    movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k2}
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm28
 ; AVX512BW-FCP-NEXT:    movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm18 {%k2}
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm28[0],xmm8[1],xmm28[1],xmm8[2],xmm28[2],xmm8[3],xmm28[3],xmm8[4],xmm28[4],xmm8[5],xmm28[5],xmm8[6],xmm28[6],xmm8[7],xmm28[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm7[0],xmm16[0],xmm7[1],xmm16[1],xmm7[2],xmm16[2],xmm7[3],xmm16[3],xmm7[4],xmm16[4],xmm7[5],xmm16[5],xmm7[6],xmm16[6],xmm7[7],xmm16[7]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[28,29,30],zero,ymm10[28],zero,ymm10[30,31,30,31],zero,ymm10[29],zero,ymm10[31,28,29]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm9[28,29,30],zero,ymm9[28],zero,ymm9[30,31,30,31],zero,ymm9[29],zero,ymm9[31,28,29]
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero,ymm10[29],zero,zero,zero
 ; AVX512BW-FCP-NEXT:    vpor %ymm1, %ymm2, %ymm1
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[2,3,2,3],zmm0[0,1,0,1]
-; AVX512BW-FCP-NEXT:    vmovdqa 32(%rcx), %xmm4
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm31
 ; AVX512BW-FCP-NEXT:    vmovdqa 32(%rdx), %xmm3
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm31[0],xmm3[1],xmm31[1],xmm3[2],xmm31[2],xmm3[3],xmm31[3],xmm3[4],xmm31[4],xmm3[5],xmm31[5],xmm3[6],xmm31[6],xmm3[7],xmm31[7]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm23, %xmm1, %xmm1
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm23 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm13[30],zero,ymm13[28],zero,zero,zero,zero,ymm13[31],zero,ymm13[29],zero
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm30 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero
-; AVX512BW-FCP-NEXT:    vporq %ymm23, %ymm30, %ymm23
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm23 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29],zero,zero
+; AVX512BW-FCP-NEXT:    vporq %ymm23, %ymm29, %ymm23
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm23 = zmm23[2,3,2,3],zmm1[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm23 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm30
-; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm31
-; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm31[0],xmm30[0],xmm31[1],xmm30[1],xmm31[2],xmm30[2],xmm31[3],xmm30[3],xmm31[4],xmm30[4],xmm31[5],xmm30[5],xmm31[6],xmm30[6],xmm31[7],xmm30[7]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm29, %xmm0, %xmm0
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm14[27],zero,zero,zero,zero,ymm14[30],zero,ymm14[28],zero,zero,zero,zero,ymm14[31],zero
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm15[27],zero,zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29]
-; AVX512BW-FCP-NEXT:    vporq %ymm1, %ymm29, %ymm1
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm29
+; AVX512BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm30
+; AVX512BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm30[0],xmm29[0],xmm30[1],xmm29[1],xmm30[2],xmm29[2],xmm30[3],xmm29[3],xmm30[4],xmm29[4],xmm30[5],xmm29[5],xmm30[6],xmm29[6],xmm30[7],xmm29[7]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm28, %xmm0, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm13[27],zero,zero,zero,zero,ymm13[30],zero,ymm13[28],zero,zero,zero,zero,ymm13[31],zero
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} ymm28 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm14[27],zero,zero,zero,zero,ymm14[30],zero,ymm14[28],zero,zero,zero,zero,ymm14[31],zero,ymm14[29]
+; AVX512BW-FCP-NEXT:    vporq %ymm1, %ymm28, %ymm1
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[2,3,2,3],zmm0[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512BW-FCP-NEXT:    movabsq $580999813345182728, %rax # imm = 0x810204081020408
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k1
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
@@ -10278,82 +10278,82 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm23 {%k1}
 ; AVX512BW-FCP-NEXT:    vmovdqa64 (%rdx), %zmm0
 ; AVX512BW-FCP-NEXT:    vmovdqa64 (%rcx), %zmm1
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm29 = zmm0[4,5,6,7],zmm1[4,5,6,7]
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm28 = zmm0[4,5,6,7],zmm1[4,5,6,7]
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[4,5,6,7],zmm0[4,5,6,7]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,zmm29[23],zero,zmm29[21,22,23,26],zero,zmm29[24],zero,zmm29[28,29,26,27,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,59],zero,zero,zero,zero,zmm29[62],zero,zmm29[60],zero,zero,zero,zero,zmm29[63],zero,zmm29[61],zero
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,zmm28[23],zero,zmm28[21,22,23,26],zero,zmm28[24],zero,zmm28[28,29,26,27,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,59],zero,zero,zero,zero,zmm28[62],zero,zmm28[60],zero,zero,zero,zero,zmm28[63],zero,zmm28[61],zero
 ; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm0[25],zero,zmm0[23],zero,zero,zero,zero,zmm0[26],zero,zmm0[24],zero,zero,zero,zero,zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm0[62],zero,zmm0[60],zero,zero,zero,zero,zmm0[63],zero,zmm0[61],zero,zero
-; AVX512BW-FCP-NEXT:    vporq %zmm1, %zmm0, %zmm29
+; AVX512BW-FCP-NEXT:    vporq %zmm1, %zmm0, %zmm28
 ; AVX512BW-FCP-NEXT:    vmovdqa64 (%rdi), %zmm1
 ; AVX512BW-FCP-NEXT:    vmovdqa64 (%rsi), %zmm2
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm2[4,5,6,7],zmm1[4,5,6,7]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zero,zero,zmm6[27],zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,60,61,62],zero,zmm6[60],zero,zmm6[62,63,62,63],zero,zmm6[61],zero,zmm6[63,60,61]
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm1[4,5,6,7],zmm2[4,5,6,7]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm7 = zmm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm7[23],zero,zero,zero,zero,zmm7[26],zero,zmm7[24],zero,zero,zero,zero,zmm7[27],zero,zmm7[25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zmm7[62],zero,zmm7[60],zero,zero,zero,zero,zmm7[63],zero,zmm7[61],zero,zero,zero
-; AVX512BW-FCP-NEXT:    vporq %zmm6, %zmm7, %zmm6
-; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm7 = zmm29[2,3,2,3,6,7,6,7]
-; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm29 = zmm6[2,3,2,3,6,7,6,7]
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm2[4,5,6,7],zmm1[4,5,6,7]
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,zmm5[23],zero,zero,zero,zero,zmm5[26],zero,zmm5[24],zero,zero,zero,zero,zmm5[27],zero,zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,60,61,62],zero,zmm5[60],zero,zmm5[62,63,62,63],zero,zmm5[61],zero,zmm5[63,60,61]
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm1[4,5,6,7],zmm2[4,5,6,7]
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zero,zero,zmm6[27],zero,zmm6[25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zmm6[62],zero,zmm6[60],zero,zero,zero,zero,zmm6[63],zero,zmm6[61],zero,zero,zero
+; AVX512BW-FCP-NEXT:    vporq %zmm5, %zmm6, %zmm5
+; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm6 = zmm28[2,3,2,3,6,7,6,7]
+; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm28 = zmm5[2,3,2,3,6,7,6,7]
 ; AVX512BW-FCP-NEXT:    movabsq $1742999440035548184, %rax # imm = 0x183060C183060C18
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k1
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm7, %zmm29 {%k1}
-; AVX512BW-FCP-NEXT:    vmovdqa64 (%r8), %zmm6
-; AVX512BW-FCP-NEXT:    vmovdqa64 (%r9), %zmm7
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm6[4,5,6,7],zmm7[4,5,6,7]
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm7[4,5,6,7],zmm6[4,5,6,7]
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm6, %zmm28 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqa64 (%r8), %zmm5
+; AVX512BW-FCP-NEXT:    vmovdqa64 (%r9), %zmm6
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm5[4,5,6,7],zmm6[4,5,6,7]
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm6[4,5,6,7],zmm5[4,5,6,7]
 ; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm0[23],zero,zmm0[23,24,25,26],zero,zmm0[24],zero,zmm0[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm0[59],zero,zero,zero,zero,zmm0[62],zero,zmm0[60],zero,zero,zero,zero,zmm0[63],zero
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm6[25],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm6[59],zero,zero,zero,zero,zmm6[62],zero,zmm6[60],zero,zero,zero,zero,zmm6[63],zero,zmm6[61]
-; AVX512BW-FCP-NEXT:    vporq %zmm0, %zmm6, %zmm0
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm5[25],zero,zmm5[23],zero,zero,zero,zero,zmm5[26],zero,zmm5[24],zero,zero,zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm5[59],zero,zero,zero,zero,zmm5[62],zero,zmm5[60],zero,zero,zero,zero,zmm5[63],zero,zmm5[61]
+; AVX512BW-FCP-NEXT:    vporq %zmm0, %zmm5, %zmm0
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
 ; AVX512BW-FCP-NEXT:    movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k1
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm29 {%k1}
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm28 {%k1}
 ; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm0 = [27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28,30,29,30,31,31,30,30,31,30,29,30,31,31,30,30,31]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm0, %zmm0
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm0, %zmm0
 ; AVX512BW-FCP-NEXT:    movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm29 {%k2}
-; AVX512BW-FCP-NEXT:    vpshufb %xmm26, %xmm4, %xmm0
-; AVX512BW-FCP-NEXT:    vpshufb %xmm27, %xmm3, %xmm6
-; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm6, %xmm0
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm28 {%k2}
+; AVX512BW-FCP-NEXT:    vpshufb %xmm26, %xmm31, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb %xmm27, %xmm3, %xmm5
+; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm5, %xmm0
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm31[8],xmm3[8],xmm31[9],xmm3[9],xmm31[10],xmm3[10],xmm31[11],xmm3[11],xmm31[12],xmm3[12],xmm31[13],xmm3[13],xmm31[14],xmm3[14],xmm31[15],xmm3[15]
 ; AVX512BW-FCP-NEXT:    vpshufb %xmm19, %xmm3, %xmm3
 ; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm3[0,1,0,1]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm21, %xmm28, %xmm3
-; AVX512BW-FCP-NEXT:    vpshufb %xmm22, %xmm8, %xmm4
-; AVX512BW-FCP-NEXT:    vpor %xmm3, %xmm4, %xmm3
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm28[8],xmm8[8],xmm28[9],xmm8[9],xmm28[10],xmm8[10],xmm28[11],xmm8[11],xmm28[12],xmm8[12],xmm28[13],xmm8[13],xmm28[14],xmm8[14],xmm28[15],xmm8[15]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm17, %xmm4, %xmm4
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,0,1],zmm4[0,1,0,1]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm21, %xmm16, %xmm3
+; AVX512BW-FCP-NEXT:    vpshufb %xmm22, %xmm7, %xmm5
+; AVX512BW-FCP-NEXT:    vpor %xmm3, %xmm5, %xmm3
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm16[8],xmm7[8],xmm16[9],xmm7[9],xmm16[10],xmm7[10],xmm16[11],xmm7[11],xmm16[12],xmm7[12],xmm16[13],xmm7[13],xmm16[14],xmm7[14],xmm16[15],xmm7[15]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm17, %xmm5, %xmm5
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,0,1],zmm5[0,1,0,1]
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm3 {%k1}
-; AVX512BW-FCP-NEXT:    vpshufb %xmm24, %xmm30, %xmm0
-; AVX512BW-FCP-NEXT:    vpshufb %xmm25, %xmm31, %xmm4
-; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm4, %xmm0
-; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm31[8],xmm30[8],xmm31[9],xmm30[9],xmm31[10],xmm30[10],xmm31[11],xmm30[11],xmm31[12],xmm30[12],xmm31[13],xmm30[13],xmm31[14],xmm30[14],xmm31[15],xmm30[15]
-; AVX512BW-FCP-NEXT:    vpshufb %xmm20, %xmm4, %xmm4
-; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm4[0,1,0,1]
-; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm4 = [18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20,20,21,20,21,21,22,21,22,20,21,20,21,21,22,21,22]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm4, %zmm4
+; AVX512BW-FCP-NEXT:    vpshufb %xmm24, %xmm29, %xmm0
+; AVX512BW-FCP-NEXT:    vpshufb %xmm25, %xmm30, %xmm5
+; AVX512BW-FCP-NEXT:    vpor %xmm0, %xmm5, %xmm0
+; AVX512BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm30[8],xmm29[8],xmm30[9],xmm29[9],xmm30[10],xmm29[10],xmm30[11],xmm29[11],xmm30[12],xmm29[12],xmm30[13],xmm29[13],xmm30[14],xmm29[14],xmm30[15],xmm29[15]
+; AVX512BW-FCP-NEXT:    vpshufb %xmm20, %xmm5, %xmm5
+; AVX512BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm5[0,1,0,1]
+; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm5 = [18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20,20,21,20,21,21,22,21,22,20,21,20,21,21,22,21,22]
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm5, %zmm5
 ; AVX512BW-FCP-NEXT:    movabsq $290499906672591364, %rax # imm = 0x408102040810204
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
+; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm5, %zmm0 {%k2}
 ; AVX512BW-FCP-NEXT:    movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm3 {%k2}
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm12[18],zero,zmm12[18,19,20,21],zero,zmm12[19],zero,zmm12[25,26,27,22],zero,zmm12[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm12[56,57],zero,zmm12[55],zero,zmm12[53,54,55,58],zero,zmm12[56],zero,zmm12[60,61,58,59]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm4 = zmm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm13[18],zero,zero,zero,zero,zmm13[21],zero,zmm13[19],zero,zero,zero,zero,zmm13[22],zero,zmm13[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm13[57],zero,zmm13[55],zero,zero,zero,zero,zmm13[58],zero,zmm13[56],zero,zero,zero,zero
-; AVX512BW-FCP-NEXT:    vporq %zmm0, %zmm4, %zmm0
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm11, %zmm1, %zmm1
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm11[18],zero,zmm11[18,19,20,21],zero,zmm11[19],zero,zmm11[25,26,27,22],zero,zmm11[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm11[56,57],zero,zmm11[55],zero,zmm11[53,54,55,58],zero,zmm11[56],zero,zmm11[60,61,58,59]
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm12[18],zero,zero,zero,zero,zmm12[21],zero,zmm12[19],zero,zero,zero,zero,zmm12[22],zero,zmm12[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm12[57],zero,zmm12[55],zero,zero,zero,zero,zmm12[58],zero,zmm12[56],zero,zero,zero,zero
+; AVX512BW-FCP-NEXT:    vporq %zmm0, %zmm5, %zmm0
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm10, %zmm1, %zmm1
 ; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm1[18,19,20,21],zero,zmm1[19],zero,zmm1[21,20,21,22],zero,zmm1[20],zero,zmm1[22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,57],zero,zmm1[55],zero,zero,zero,zero,zmm1[58],zero,zmm1[56],zero,zero,zero,zero,zmm1[59],zero
-; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm10, %zmm2, %zmm2
+; AVX512BW-FCP-NEXT:    vinserti64x4 $1, %ymm9, %zmm2, %zmm2
 ; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm2[21],zero,zmm2[19],zero,zero,zero,zero,zmm2[22],zero,zmm2[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm2[55],zero,zero,zero,zero,zmm2[58],zero,zmm2[56],zero,zero,zero,zero,zmm2[59],zero,zmm2[57]
 ; AVX512BW-FCP-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm15[20],zero,zmm15[18],zero,zmm15[20,21,20,21],zero,zmm15[19],zero,zmm15[19,20,21,22],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm15[56,57,56,57],zero,zmm15[55],zero,zmm15[55,56,57,58],zero,zmm15[56],zero,zmm15[62,63]
-; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm14[20],zero,zmm14[18],zero,zero,zero,zero,zmm14[21],zero,zmm14[19],zero,zero,zero,zero,zmm14[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm14[57],zero,zmm14[55],zero,zero,zero,zero,zmm14[58],zero,zmm14[56],zero,zero
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm14[20],zero,zmm14[18],zero,zmm14[20,21,20,21],zero,zmm14[19],zero,zmm14[19,20,21,22],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm14[56,57,56,57],zero,zmm14[55],zero,zmm14[55,56,57,58],zero,zmm14[56],zero,zmm14[62,63]
+; AVX512BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm13[20],zero,zmm13[18],zero,zero,zero,zero,zmm13[21],zero,zmm13[19],zero,zero,zero,zero,zmm13[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm13[57],zero,zmm13[55],zero,zero,zero,zero,zmm13[58],zero,zmm13[56],zero,zero
 ; AVX512BW-FCP-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; AVX512BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm2 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12]
-; AVX512BW-FCP-NEXT:    vpermw %zmm16, %zmm2, %zmm2
+; AVX512BW-FCP-NEXT:    vpermw %zmm15, %zmm2, %zmm2
 ; AVX512BW-FCP-NEXT:    vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
 ; AVX512BW-FCP-NEXT:    movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
 ; AVX512BW-FCP-NEXT:    kmovq %rax, %k1
@@ -10363,12 +10363,12 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
 ; AVX512BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm5, 320(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm4, 320(%rax)
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm3, 256(%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm29, 384(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm28, 384(%rax)
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm23, 192(%rax)
 ; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm9, 64(%rax)
+; AVX512BW-FCP-NEXT:    vmovdqa64 %zmm8, 64(%rax)
 ; AVX512BW-FCP-NEXT:    vzeroupper
 ; AVX512BW-FCP-NEXT:    retq
 ;
@@ -10653,14 +10653,14 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP-LABEL: store_i8_stride7_vf64:
 ; AVX512DQ-BW-FCP:       # %bb.0:
 ; AVX512DQ-BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rax), %zmm16
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rax), %zmm15
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rdx), %ymm0
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm13 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm13, %ymm0, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm12 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm0, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm14 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm14, %ymm2, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm1, %ymm3, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm13 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm13, %ymm2, %ymm4
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm1, %ymm4, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128]
@@ -10670,105 +10670,106 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rdi), %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm2 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm2, %ymm1, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rsi), %ymm5
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm9 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm9, %ymm5, %ymm6
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm3, %ymm6, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm1, %ymm1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm5, %ymm5
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm1, %ymm5, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm2, %ymm1, %ymm4
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rsi), %ymm6
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm8 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm8, %ymm6, %ymm5
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm4, %ymm5, %ymm4
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm3, %ymm1, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm3, %ymm6, %ymm6
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm1, %ymm6, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm3, %zmm5
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm1, %zmm4, %zmm4
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %r10, %k1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm5 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%r9), %ymm3
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm4 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%r9), %ymm9
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm0 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm0, %ymm3, %ymm6
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm0, %ymm9, %ymm6
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%r8), %ymm7
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm1, %ymm7, %ymm8
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm6, %ymm8, %ymm8
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm7, %ymm10
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm4, %ymm3, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm3, %ymm10, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm3, %zmm8, %zmm3
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm1, %ymm7, %ymm10
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm6, %ymm10, %ymm10
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,24,25,24,25,128,23,128,23,24,25,26,128,24,128,30,31]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm3, %ymm7, %ymm11
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm3, %ymm9, %ymm9
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm11, %ymm9, %ymm9
+; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm9, %zmm10, %zmm9
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rax), %ymm10
-; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm4 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %ymm10, %ymm4, %ymm11
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} ymm11 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %ymm10, %ymm11, %ymm11
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
 ; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm10
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $145249953336295682, %rax # imm = 0x204081020408102
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm10, %zmm3 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm10, %zmm9 {%k2}
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $-4357498600088870461, %rax # imm = 0xC3870E1C3870E1C3
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm3, %zmm5 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdi), %ymm10
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm2, %ymm10, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rsi), %ymm11
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm9, %ymm11, %ymm3
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rdi), %xmm8
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rsi), %xmm18
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm9 = xmm18[8],xmm3[8],xmm18[9],xmm3[9],xmm18[10],xmm3[10],xmm18[11],xmm3[11],xmm18[12],xmm3[12],xmm18[13],xmm3[13],xmm18[14],xmm3[14],xmm18[15],xmm3[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm9, %zmm4 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rdi), %ymm9
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm2, %ymm9, %ymm2
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa (%rsi), %ymm10
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm8, %ymm10, %ymm8
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm2, %ymm8, %ymm2
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rdi), %xmm18
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rdi), %xmm7
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rsi), %xmm23
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm16
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm23[8],xmm18[8],xmm23[9],xmm18[9],xmm23[10],xmm18[10],xmm23[11],xmm18[11],xmm23[12],xmm18[12],xmm23[13],xmm18[13],xmm23[14],xmm18[14],xmm23[15],xmm18[15]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm17 = [2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm17, %xmm9, %xmm9
-; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,1,0,1]
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm9, %zmm9
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm17, %xmm8, %xmm8
+; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,1,0,1]
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm8, %zmm8
+; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm12, %ymm11, %ymm2
 ; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm13, %ymm12, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm14, %ymm13, %ymm14
-; AVX512DQ-BW-FCP-NEXT:    vpor %ymm2, %ymm14, %ymm2
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm13[8],xmm12[8],xmm13[9],xmm12[9],xmm13[10],xmm12[10],xmm13[11],xmm12[11],xmm13[12],xmm12[12],xmm13[13],xmm12[13],xmm13[14],xmm12[14],xmm13[15],xmm12[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm13, %ymm12, %ymm13
+; AVX512DQ-BW-FCP-NEXT:    vpor %ymm2, %ymm13, %ymm2
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm13 = xmm12[8],xmm11[8],xmm12[9],xmm11[9],xmm12[10],xmm11[10],xmm12[11],xmm11[11],xmm12[12],xmm11[12],xmm12[13],xmm11[13],xmm12[14],xmm11[14],xmm12[15],xmm11[15]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm19 = [6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm19, %xmm14, %xmm14
-; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[0,1,0,1]
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm14, %zmm2
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm19, %xmm13, %xmm13
+; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[0,1,0,1]
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm2, %zmm13, %zmm2
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $435749860008887046, %rax # imm = 0x60C183060C18306
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm2, %zmm9 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm2, %zmm8 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = mem[0,1,2,3,0,1,2,3]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm0, %ymm13, %ymm0
 ; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm14 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm0, %ymm14, %ymm0
-; AVX512DQ-BW-FCP-NEXT:    vbroadcasti64x4 {{.*#+}} zmm15 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm1, %ymm15, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %ymm1, %ymm14, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm15[8],xmm14[8],xmm15[9],xmm14[9],xmm15[10],xmm14[10],xmm15[11],xmm14[11],xmm15[12],xmm14[12],xmm15[13],xmm14[13],xmm15[14],xmm14[14],xmm15[15],xmm14[15]
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm20 = [u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm20, %xmm1, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6,6,1,6,0,7,1,0,7,14,9,14,8,15,9,8,15]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $2323999253380730912, %rax # imm = 0x2040810204081020
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k2}
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $4066998693416279096, %rax # imm = 0x3870E1C3870E1C38
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm9 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm8 {%k2}
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm21 = [u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm21, %xmm18, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm21, %xmm23, %xmm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm22 = [u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm22, %xmm3, %xmm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm22, %xmm18, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm18[0],xmm3[1],xmm18[1],xmm3[2],xmm18[2],xmm3[3],xmm18[3],xmm3[4],xmm18[4],xmm3[5],xmm18[5],xmm3[6],xmm18[6],xmm3[7],xmm18[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm18[0],xmm23[0],xmm18[1],xmm23[1],xmm18[2],xmm23[2],xmm18[3],xmm23[3],xmm18[4],xmm23[4],xmm18[5],xmm23[5],xmm18[6],xmm23[6],xmm18[7],xmm23[7]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm18 = zmm1[0,1,0,1],zmm0[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm26 = [u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm26, %xmm13, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm26, %xmm12, %xmm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm27 = [u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm27, %xmm12, %xmm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm27, %xmm11, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm12[0],xmm13[0],xmm12[1],xmm13[1],xmm12[2],xmm13[2],xmm12[3],xmm13[3],xmm12[4],xmm13[4],xmm12[5],xmm13[5],xmm12[6],xmm13[6],xmm12[7],xmm13[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3],xmm11[4],xmm12[4],xmm11[5],xmm12[5],xmm11[6],xmm12[6],xmm11[7],xmm12[7]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm23 = [4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm23, %xmm1, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
@@ -10776,48 +10777,47 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm18 {%k2}
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm24 = [128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm24, %xmm14, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm24, %xmm13, %xmm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm25 = [4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm25, %xmm15, %xmm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm25, %xmm14, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm1, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3],xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm29 = [u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm29, %xmm1, %xmm1
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 {{.*#+}} xmm28 = [u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm28, %xmm1, %xmm1
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,0,1],zmm0[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rsi), %xmm28
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm18 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm28[0],xmm8[1],xmm28[1],xmm8[2],xmm28[2],xmm8[3],xmm28[3],xmm8[4],xmm28[4],xmm8[5],xmm28[5],xmm8[6],xmm28[6],xmm8[7],xmm28[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm7[0],xmm16[0],xmm7[1],xmm16[1],xmm7[2],xmm16[2],xmm7[3],xmm16[3],xmm7[4],xmm16[4],xmm7[5],xmm16[5],xmm7[6],xmm16[6],xmm7[7],xmm16[7]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[28,29,30],zero,ymm10[28],zero,ymm10[30,31,30,31],zero,ymm10[29],zero,ymm10[31,28,29]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm9[28,29,30],zero,ymm9[28],zero,ymm9[30,31,30,31],zero,ymm9[29],zero,ymm9[31,28,29]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm2 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero,ymm10[29],zero,zero,zero
 ; AVX512DQ-BW-FCP-NEXT:    vpor %ymm1, %ymm2, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[2,3,2,3],zmm0[0,1,0,1]
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rcx), %xmm4
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%rcx), %xmm31
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa 32(%rdx), %xmm3
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3],xmm3[4],xmm4[4],xmm3[5],xmm4[5],xmm3[6],xmm4[6],xmm3[7],xmm4[7]
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm31[0],xmm3[1],xmm31[1],xmm3[2],xmm31[2],xmm3[3],xmm31[3],xmm3[4],xmm31[4],xmm3[5],xmm31[5],xmm3[6],xmm31[6],xmm3[7],xmm31[7]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm23, %xmm1, %xmm1
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm23 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm13[30],zero,ymm13[28],zero,zero,zero,zero,ymm13[31],zero,ymm13[29],zero
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm30 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vporq %ymm23, %ymm30, %ymm23
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm23 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27],zero,zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29],zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vporq %ymm23, %ymm29, %ymm23
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm23 = zmm23[2,3,2,3],zmm1[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm23 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm30
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm31
-; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm31[0],xmm30[0],xmm31[1],xmm30[1],xmm31[2],xmm30[2],xmm31[3],xmm30[3],xmm31[4],xmm30[4],xmm31[5],xmm30[5],xmm31[6],xmm30[6],xmm31[7],xmm30[7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm29, %xmm0, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm14[27],zero,zero,zero,zero,ymm14[30],zero,ymm14[28],zero,zero,zero,zero,ymm14[31],zero
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm15[27],zero,zero,zero,zero,ymm15[30],zero,ymm15[28],zero,zero,zero,zero,ymm15[31],zero,ymm15[29]
-; AVX512DQ-BW-FCP-NEXT:    vporq %ymm1, %ymm29, %ymm1
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r9), %xmm29
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 32(%r8), %xmm30
+; AVX512DQ-BW-FCP-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm30[0],xmm29[0],xmm30[1],xmm29[1],xmm30[2],xmm29[2],xmm30[3],xmm29[3],xmm30[4],xmm29[4],xmm30[5],xmm29[5],xmm30[6],xmm29[6],xmm30[7],xmm29[7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm28, %xmm0, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm1 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm13[27],zero,zero,zero,zero,ymm13[30],zero,ymm13[28],zero,zero,zero,zero,ymm13[31],zero
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} ymm28 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm14[27],zero,zero,zero,zero,ymm14[30],zero,ymm14[28],zero,zero,zero,zero,ymm14[31],zero,ymm14[29]
+; AVX512DQ-BW-FCP-NEXT:    vporq %ymm1, %ymm28, %ymm1
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[2,3,2,3],zmm0[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm1 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm1, %zmm1
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm1, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $580999813345182728, %rax # imm = 0x810204081020408
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k1
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
@@ -10826,82 +10826,82 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm23 {%k1}
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rdx), %zmm0
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rcx), %zmm1
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm29 = zmm0[4,5,6,7],zmm1[4,5,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm28 = zmm0[4,5,6,7],zmm1[4,5,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[4,5,6,7],zmm0[4,5,6,7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zmm29[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,zmm29[23],zero,zmm29[21,22,23,26],zero,zmm29[24],zero,zmm29[28,29,26,27,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,59],zero,zero,zero,zero,zmm29[62],zero,zmm29[60],zero,zero,zero,zero,zmm29[63],zero,zmm29[61],zero
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,zmm28[23],zero,zmm28[21,22,23,26],zero,zmm28[24],zero,zmm28[28,29,26,27,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,59],zero,zero,zero,zero,zmm28[62],zero,zmm28[60],zero,zero,zero,zero,zmm28[63],zero,zmm28[61],zero
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm0[25],zero,zmm0[23],zero,zero,zero,zero,zmm0[26],zero,zmm0[24],zero,zero,zero,zero,zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm0[62],zero,zmm0[60],zero,zero,zero,zero,zmm0[63],zero,zmm0[61],zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vporq %zmm1, %zmm0, %zmm29
+; AVX512DQ-BW-FCP-NEXT:    vporq %zmm1, %zmm0, %zmm28
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rdi), %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%rsi), %zmm2
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm2[4,5,6,7],zmm1[4,5,6,7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zero,zero,zmm6[27],zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,60,61,62],zero,zmm6[60],zero,zmm6[62,63,62,63],zero,zmm6[61],zero,zmm6[63,60,61]
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm1[4,5,6,7],zmm2[4,5,6,7]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm7 = zmm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm7[23],zero,zero,zero,zero,zmm7[26],zero,zmm7[24],zero,zero,zero,zero,zmm7[27],zero,zmm7[25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zmm7[62],zero,zmm7[60],zero,zero,zero,zero,zmm7[63],zero,zmm7[61],zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vporq %zmm6, %zmm7, %zmm6
-; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm7 = zmm29[2,3,2,3,6,7,6,7]
-; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm29 = zmm6[2,3,2,3,6,7,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm2[4,5,6,7],zmm1[4,5,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25],zero,zmm5[23],zero,zero,zero,zero,zmm5[26],zero,zmm5[24],zero,zero,zero,zero,zmm5[27],zero,zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,60,61,62],zero,zmm5[60],zero,zmm5[62,63,62,63],zero,zmm5[61],zero,zmm5[63,60,61]
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm1[4,5,6,7],zmm2[4,5,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zero,zero,zmm6[27],zero,zmm6[25,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zmm6[62],zero,zmm6[60],zero,zero,zero,zero,zmm6[63],zero,zmm6[61],zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vporq %zmm5, %zmm6, %zmm5
+; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm6 = zmm28[2,3,2,3,6,7,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm28 = zmm5[2,3,2,3,6,7,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $1742999440035548184, %rax # imm = 0x183060C183060C18
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm7, %zmm29 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%r8), %zmm6
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%r9), %zmm7
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm6[4,5,6,7],zmm7[4,5,6,7]
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm6 = zmm7[4,5,6,7],zmm6[4,5,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm6, %zmm28 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%r8), %zmm5
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 (%r9), %zmm6
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm5[4,5,6,7],zmm6[4,5,6,7]
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm6[4,5,6,7],zmm5[4,5,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zmm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm0[23],zero,zmm0[23,24,25,26],zero,zmm0[24],zero,zmm0[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm0[59],zero,zero,zero,zero,zmm0[62],zero,zmm0[60],zero,zero,zero,zero,zmm0[63],zero
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm6 = zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm6[25],zero,zmm6[23],zero,zero,zero,zero,zmm6[26],zero,zmm6[24],zero,zero,zmm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm6[59],zero,zero,zero,zero,zmm6[62],zero,zmm6[60],zero,zero,zero,zero,zmm6[63],zero,zmm6[61]
-; AVX512DQ-BW-FCP-NEXT:    vporq %zmm0, %zmm6, %zmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm5[25],zero,zmm5[23],zero,zero,zero,zero,zmm5[26],zero,zmm5[24],zero,zero,zmm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm5[59],zero,zero,zero,zero,zmm5[62],zero,zmm5[60],zero,zero,zero,zero,zmm5[63],zero,zmm5[61]
+; AVX512DQ-BW-FCP-NEXT:    vporq %zmm0, %zmm5, %zmm0
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k1
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm29 {%k1}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm28 {%k1}
 ; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm0 = [27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28,30,29,30,31,31,30,30,31,30,29,30,31,31,30,30,31]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm0, %zmm0
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm0, %zmm0
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm29 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm26, %xmm4, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm27, %xmm3, %xmm6
-; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm6, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm28 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm26, %xmm31, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm27, %xmm3, %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm5, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm3 = xmm31[8],xmm3[8],xmm31[9],xmm3[9],xmm31[10],xmm3[10],xmm31[11],xmm3[11],xmm31[12],xmm3[12],xmm31[13],xmm3[13],xmm31[14],xmm3[14],xmm31[15],xmm3[15]
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm19, %xmm3, %xmm3
 ; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm3[0,1,0,1]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm21, %xmm28, %xmm3
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm22, %xmm8, %xmm4
-; AVX512DQ-BW-FCP-NEXT:    vpor %xmm3, %xmm4, %xmm3
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm28[8],xmm8[8],xmm28[9],xmm8[9],xmm28[10],xmm8[10],xmm28[11],xmm8[11],xmm28[12],xmm8[12],xmm28[13],xmm8[13],xmm28[14],xmm8[14],xmm28[15],xmm8[15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm17, %xmm4, %xmm4
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,0,1],zmm4[0,1,0,1]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm21, %xmm16, %xmm3
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm22, %xmm7, %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vpor %xmm3, %xmm5, %xmm3
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm16[8],xmm7[8],xmm16[9],xmm7[9],xmm16[10],xmm7[10],xmm16[11],xmm7[11],xmm16[12],xmm7[12],xmm16[13],xmm7[13],xmm16[14],xmm7[14],xmm16[15],xmm7[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm17, %xmm5, %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,0,1],zmm5[0,1,0,1]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm3 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm24, %xmm30, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm25, %xmm31, %xmm4
-; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm4, %xmm0
-; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm31[8],xmm30[8],xmm31[9],xmm30[9],xmm31[10],xmm30[10],xmm31[11],xmm30[11],xmm31[12],xmm30[12],xmm31[13],xmm30[13],xmm31[14],xmm30[14],xmm31[15],xmm30[15]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm20, %xmm4, %xmm4
-; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm4[0,1,0,1]
-; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm4 = [18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20,20,21,20,21,21,22,21,22,20,21,20,21,21,22,21,22]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm4, %zmm4
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm24, %xmm29, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm25, %xmm30, %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vpor %xmm0, %xmm5, %xmm0
+; AVX512DQ-BW-FCP-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm30[8],xmm29[8],xmm30[9],xmm29[9],xmm30[10],xmm29[10],xmm30[11],xmm29[11],xmm30[12],xmm29[12],xmm30[13],xmm29[13],xmm30[14],xmm29[14],xmm30[15],xmm29[15]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb %xmm20, %xmm5, %xmm5
+; AVX512DQ-BW-FCP-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,0,1],zmm5[0,1,0,1]
+; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm5 = [18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20,20,21,20,21,21,22,21,22,20,21,20,21,21,22,21,22]
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm5, %zmm5
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $290499906672591364, %rax # imm = 0x408102040810204
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
-; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm4, %zmm0 {%k2}
+; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm5, %zmm0 {%k2}
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k2
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm0, %zmm3 {%k2}
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm12[18],zero,zmm12[18,19,20,21],zero,zmm12[19],zero,zmm12[25,26,27,22],zero,zmm12[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm12[56,57],zero,zmm12[55],zero,zmm12[53,54,55,58],zero,zmm12[56],zero,zmm12[60,61,58,59]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm4 = zmm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm13[18],zero,zero,zero,zero,zmm13[21],zero,zmm13[19],zero,zero,zero,zero,zmm13[22],zero,zmm13[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm13[57],zero,zmm13[55],zero,zero,zero,zero,zmm13[58],zero,zmm13[56],zero,zero,zero,zero
-; AVX512DQ-BW-FCP-NEXT:    vporq %zmm0, %zmm4, %zmm0
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm11, %zmm1, %zmm1
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm0 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm11[18],zero,zmm11[18,19,20,21],zero,zmm11[19],zero,zmm11[25,26,27,22],zero,zmm11[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm11[56,57],zero,zmm11[55],zero,zmm11[53,54,55,58],zero,zmm11[56],zero,zmm11[60,61,58,59]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm5 = zmm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm12[18],zero,zero,zero,zero,zmm12[21],zero,zmm12[19],zero,zero,zero,zero,zmm12[22],zero,zmm12[20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zmm12[57],zero,zmm12[55],zero,zero,zero,zero,zmm12[58],zero,zmm12[56],zero,zero,zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vporq %zmm0, %zmm5, %zmm0
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm10, %zmm1, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm1[18,19,20,21],zero,zmm1[19],zero,zmm1[21,20,21,22],zero,zmm1[20],zero,zmm1[22,23,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,57],zero,zmm1[55],zero,zero,zero,zero,zmm1[58],zero,zmm1[56],zero,zero,zero,zero,zmm1[59],zero
-; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm10, %zmm2, %zmm2
+; AVX512DQ-BW-FCP-NEXT:    vinserti64x4 $1, %ymm9, %zmm2, %zmm2
 ; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm2[21],zero,zmm2[19],zero,zero,zero,zero,zmm2[22],zero,zmm2[20],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm2[55],zero,zero,zero,zero,zmm2[58],zero,zmm2[56],zero,zero,zero,zero,zmm2[59],zero,zmm2[57]
 ; AVX512DQ-BW-FCP-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm15[20],zero,zmm15[18],zero,zmm15[20,21,20,21],zero,zmm15[19],zero,zmm15[19,20,21,22],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm15[56,57,56,57],zero,zmm15[55],zero,zmm15[55,56,57,58],zero,zmm15[56],zero,zmm15[62,63]
-; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm14[20],zero,zmm14[18],zero,zero,zero,zero,zmm14[21],zero,zmm14[19],zero,zero,zero,zero,zmm14[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm14[57],zero,zmm14[55],zero,zero,zero,zero,zmm14[58],zero,zmm14[56],zero,zero
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm1 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm14[20],zero,zmm14[18],zero,zmm14[20,21,20,21],zero,zmm14[19],zero,zmm14[19,20,21,22],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zmm14[56,57,56,57],zero,zmm14[55],zero,zmm14[55,56,57,58],zero,zmm14[56],zero,zmm14[62,63]
+; AVX512DQ-BW-FCP-NEXT:    vpshufb {{.*#+}} zmm2 = zmm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm13[20],zero,zmm13[18],zero,zero,zero,zero,zmm13[21],zero,zmm13[19],zero,zero,zero,zero,zmm13[22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm13[57],zero,zmm13[55],zero,zero,zero,zero,zmm13[58],zero,zmm13[56],zero,zero
 ; AVX512DQ-BW-FCP-NEXT:    vporq %zmm1, %zmm2, %zmm1
 ; AVX512DQ-BW-FCP-NEXT:    vpmovsxbw {{.*#+}} zmm2 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12]
-; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm16, %zmm2, %zmm2
+; AVX512DQ-BW-FCP-NEXT:    vpermw %zmm15, %zmm2, %zmm2
 ; AVX512DQ-BW-FCP-NEXT:    vpermq {{.*#+}} zmm1 = zmm1[2,3,2,3,6,7,6,7]
 ; AVX512DQ-BW-FCP-NEXT:    movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
 ; AVX512DQ-BW-FCP-NEXT:    kmovq %rax, %k1
@@ -10911,12 +10911,12 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
 ; AVX512DQ-BW-FCP-NEXT:    movq {{[0-9]+}}(%rsp), %rax
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm5, 320(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm4, 320(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm3, 256(%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm29, 384(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm28, 384(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm23, 192(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm9, 64(%rax)
+; AVX512DQ-BW-FCP-NEXT:    vmovdqa64 %zmm8, 64(%rax)
 ; AVX512DQ-BW-FCP-NEXT:    vzeroupper
 ; AVX512DQ-BW-FCP-NEXT:    retq
   %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64


