[llvm] 539e60c - [X86] X86FixupVectorConstantsPass - consistently use non-DQI 128/256-bit subvector broadcasts

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Thu Nov 30 10:34:30 PST 2023


Author: Simon Pilgrim
Date: 2023-11-30T18:33:52Z
New Revision: 539e60c34a0a960f55eacf764f8c74d6c6c9d2b3

URL: https://github.com/llvm/llvm-project/commit/539e60c34a0a960f55eacf764f8c74d6c6c9d2b3
DIFF: https://github.com/llvm/llvm-project/commit/539e60c34a0a960f55eacf764f8c74d6c6c9d2b3.diff

LOG: [X86] X86FixupVectorConstantsPass - consistently use non-DQI 128/256-bit subvector broadcasts

Without a mask predicate there's no benefit to using the DQI variants instead of the default AVX512F instructions.
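
For context, this means a splatted subvector constant load is now always rewritten to the AVX512F broadcast form, even when AVX512DQ is available. A representative before/after pair, taken from the test diffs below:

    ; before (HasDQI):  vbroadcasti64x2 {{.*#+}} ymm30 = [2,5,9,12,2,5,9,12]
    ; after  (always):  vbroadcasti32x4 {{.*#+}} ymm30 = [2,5,9,12,2,5,9,12]

Both instructions load and splat the same 128-bit pattern; unmasked, they are functionally identical and differ only in nominal element width, and hence in which subtarget feature they require.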

Added: 
    

Modified: 
    llvm/lib/Target/X86/X86FixupVectorConstants.cpp
    llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-7.ll
    llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-8.ll
    llvm/test/CodeGen/X86/vector-interleaved-load-i32-stride-8.ll
    llvm/test/CodeGen/X86/vector-interleaved-load-i64-stride-7.ll
    llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-3.ll
    llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-7.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-7.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-6.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-7.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-8.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-5.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-6.ll
    llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
    llvm/test/CodeGen/X86/vector-lzcnt-512.ll
    llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast.ll
    llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast_from_memory.ll

Removed: 
    


################################################################################
diff --git a/llvm/lib/Target/X86/X86FixupVectorConstants.cpp b/llvm/lib/Target/X86/X86FixupVectorConstants.cpp
index 4415c20496a1590..99e92bbcf996d2a 100644
--- a/llvm/lib/Target/X86/X86FixupVectorConstants.cpp
+++ b/llvm/lib/Target/X86/X86FixupVectorConstants.cpp
@@ -297,17 +297,16 @@ bool X86FixupVectorConstantsPass::processInstruction(MachineFunction &MF,
   case X86::VMOVAPSZ256rm:
   case X86::VMOVUPDZ256rm:
   case X86::VMOVUPSZ256rm:
-    return ConvertToBroadcast(
-        0, HasDQI ? X86::VBROADCASTF64X2Z128rm : X86::VBROADCASTF32X4Z256rm,
-        X86::VBROADCASTSDZ256rm, X86::VBROADCASTSSZ256rm, 0, 0, 1);
+    return ConvertToBroadcast(0, X86::VBROADCASTF32X4Z256rm,
+                              X86::VBROADCASTSDZ256rm, X86::VBROADCASTSSZ256rm,
+                              0, 0, 1);
   case X86::VMOVAPDZrm:
   case X86::VMOVAPSZrm:
   case X86::VMOVUPDZrm:
   case X86::VMOVUPSZrm:
-    return ConvertToBroadcast(
-        HasDQI ? X86::VBROADCASTF32X8rm : X86::VBROADCASTF64X4rm,
-        HasDQI ? X86::VBROADCASTF64X2rm : X86::VBROADCASTF32X4rm,
-        X86::VBROADCASTSDZrm, X86::VBROADCASTSSZrm, 0, 0, 1);
+    return ConvertToBroadcast(X86::VBROADCASTF64X4rm, X86::VBROADCASTF32X4rm,
+                              X86::VBROADCASTSDZrm, X86::VBROADCASTSSZrm, 0, 0,
+                              1);
     /* Integer Loads */
   case X86::VMOVDQArm:
   case X86::VMOVDQUrm:
@@ -336,21 +335,18 @@ bool X86FixupVectorConstantsPass::processInstruction(MachineFunction &MF,
   case X86::VMOVDQA64Z256rm:
   case X86::VMOVDQU32Z256rm:
   case X86::VMOVDQU64Z256rm:
-    return ConvertToBroadcast(
-        0, HasDQI ? X86::VBROADCASTI64X2Z128rm : X86::VBROADCASTI32X4Z256rm,
-        X86::VPBROADCASTQZ256rm, X86::VPBROADCASTDZ256rm,
-        HasBWI ? X86::VPBROADCASTWZ256rm : 0,
-        HasBWI ? X86::VPBROADCASTBZ256rm : 0, 1);
+    return ConvertToBroadcast(0, X86::VBROADCASTI32X4Z256rm,
+                              X86::VPBROADCASTQZ256rm, X86::VPBROADCASTDZ256rm,
+                              HasBWI ? X86::VPBROADCASTWZ256rm : 0,
+                              HasBWI ? X86::VPBROADCASTBZ256rm : 0, 1);
   case X86::VMOVDQA32Zrm:
   case X86::VMOVDQA64Zrm:
   case X86::VMOVDQU32Zrm:
   case X86::VMOVDQU64Zrm:
-    return ConvertToBroadcast(
-        HasDQI ? X86::VBROADCASTI32X8rm : X86::VBROADCASTI64X4rm,
-        HasDQI ? X86::VBROADCASTI64X2rm : X86::VBROADCASTI32X4rm,
-        X86::VPBROADCASTQZrm, X86::VPBROADCASTDZrm,
-        HasBWI ? X86::VPBROADCASTWZrm : 0, HasBWI ? X86::VPBROADCASTBZrm : 0,
-        1);
+    return ConvertToBroadcast(X86::VBROADCASTI64X4rm, X86::VBROADCASTI32X4rm,
+                              X86::VPBROADCASTQZrm, X86::VPBROADCASTDZrm,
+                              HasBWI ? X86::VPBROADCASTWZrm : 0,
+                              HasBWI ? X86::VPBROADCASTBZrm : 0, 1);
   }
 
   auto ConvertToBroadcastAVX512 = [&](unsigned OpSrc32, unsigned OpSrc64) {

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-7.ll
index dc8fabe3a4329b7..893bff29b21e45a 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-7.ll
@@ -2794,307 +2794,156 @@ define void @load_i16_stride7_vf16(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512F-SLOW-NEXT:    vzeroupper
 ; AVX512F-SLOW-NEXT:    retq
 ;
-; AVX512F-ONLY-FAST-LABEL: load_i16_stride7_vf16:
-; AVX512F-ONLY-FAST:       # %bb.0:
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 64(%rdi), %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 128(%rdi), %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [2,5,9,12,2,5,9,12]
-; AVX512F-ONLY-FAST-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = [10,3,6,15,12,13,6,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,6,9,u,13,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,5,9,u,12,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [8,1,12,5,12,5,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [3,6,10,13,3,6,10,13]
-; AVX512F-ONLY-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm1, %zmm2, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 192(%rdi), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm2[0,1,0,2]
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm10 = [20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm13, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5,6],ymm5[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm3[6,7,12,13,2,3,16,17,30,31,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4,5],ymm5[6],ymm4[7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm12, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3],xmm14[4],xmm12[5],xmm14[6],xmm12[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[0,1,14,15,12,13,10,11,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm3, %ymm12, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm11[5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 160(%rdi), %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 128(%rdi), %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm12[0,1],ymm11[2],ymm12[3,4,5],ymm11[6],ymm12[7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0,1,2,3],xmm15[4],xmm14[5],xmm15[6],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[0,1,0,1,14,15,12,13,10,11,8,9,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vinserti128 $1, %xmm14, %ymm0, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,28,29]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm14[0,1,2,3,4,5,6],ymm13[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[0,1,6,7,8,9,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm4[0,1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3,4,5],xmm14[6],xmm15[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[2,3,0,1,14,15,12,13,10,11],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm6, %ymm14, %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm6[0],ymm13[1,2,3,4,5,6,7],ymm6[8],ymm13[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm13[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm12[0,1,2],ymm11[3],ymm12[4,5],ymm11[6],ymm12[7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3,4,5],xmm13[6],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,2,3,0,1,14,15,12,13,10,11,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [2,5,2,5,2,5,2,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm2, %ymm14, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5,4,7,8,9,10,11,12,13,12,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5,6],ymm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[2,3,4,5,10,11,16,17],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1],xmm14[2,3,4,5],xmm15[6],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[4,5,2,3,0,1,14,15,12,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm8[0],ymm13[1,2,3,4,5,6,7],ymm8[8],ymm13[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm13[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5],xmm14[6],xmm13[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,4,5,2,3,0,1,14,15,12,13,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm2[0,1,1,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm15 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,24,25]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm15 = ymm13[0,1,2,3,4,5,6],ymm15[7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm7
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm7 = xmm7[0],xmm13[1],xmm7[2],xmm13[3],xmm7[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm9[4,5,10,11,0,1,22,23],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[6,7,4,5,2,3,0,1,14,15],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm7, %ymm9, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm1, %zmm16, %zmm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = <0,3,7,10,14,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm7[0],ymm15[1,2,3,4,5,6,7],ymm7[8],ymm15[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0],ymm12[1],ymm11[2,3,4],ymm12[5],ymm11[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm11, %xmm12
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0],xmm11[1],xmm12[2],xmm11[3],xmm12[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm11 = xmm11[0,1,6,7,4,5,2,3,0,1,14,15,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vinserti128 $1, %xmm11, %ymm0, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm14, %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5,6],ymm10[7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm10 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [8,9,4,5,4,5,6,7,8,9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm10, %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm10[3,1,2,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm9, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm14[0,1],ymm9[2,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm9[0],ymm11[1,2,3,4,5,6,7],ymm9[8],ymm11[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <0,3,3,u,0,3,7,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm2, %ymm11, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,0,1,6,7,8,9,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,0,1,6,7,8,9,14,15,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm13[0,1,2,3,4],ymm11[5,6,7],ymm13[8,9,10,11,12],ymm11[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm12, %xmm14, %xmm12
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = <1,4,8,11,15,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [2,6,9,13,2,6,9,13]
-; AVX512F-ONLY-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm16 = <0,4,7,11,14,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[10,11,6,7,4,5,6,7,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm16, %zmm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,2,3,4,5,10,11,12,13,18,19,18,19,18,19,18,19,18,19,20,21,26,27,28,29]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1],ymm13[2,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm1, %zmm15, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm12[0],ymm11[1,2,3,4,5,6,7],ymm12[8],ymm11[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3],ymm11[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [0,4,7,0,0,4,7,0]
-; AVX512F-ONLY-FAST-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm2, %ymm12, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,2,3,4,5,10,11,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,2,3,4,5,10,11,12,13,u,u,u,u,u,u,u,u,18,19,20,21,26,27,28,29,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm14, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm2 = ymm5[0,1,2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7]
-; AVX512F-ONLY-FAST-NEXT:    vextracti128 $1, %ymm2, %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[u,u,u,u,u,u,u,u,10,11,6,7,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,4,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1,2,3,4,5,6,7],ymm0[8],ymm1[9,10,11,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm3, (%rsi)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm6, (%rdx)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm8, (%rcx)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm7, (%r8)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm9, (%r9)
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm11, (%rax)
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm0, (%rax)
-; AVX512F-ONLY-FAST-NEXT:    vzeroupper
-; AVX512F-ONLY-FAST-NEXT:    retq
-;
-; AVX512DQ-FAST-LABEL: load_i16_stride7_vf16:
-; AVX512DQ-FAST:       # %bb.0:
-; AVX512DQ-FAST-NEXT:    vmovdqa64 64(%rdi), %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 128(%rdi), %zmm1
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm16 = [2,5,9,12,2,5,9,12]
-; AVX512DQ-FAST-NEXT:    # ymm16 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = [10,3,6,15,12,13,6,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,6,9,u,13,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm8
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,5,9,u,12,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [8,1,12,5,12,5,14,15]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm3
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [3,6,10,13,3,6,10,13]
-; AVX512DQ-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm1, %zmm2, %zmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa 192(%rdi), %ymm2
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm2[0,1,0,2]
-; AVX512DQ-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm10 = [20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm13, %ymm5
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5,6],ymm5[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm3[6,7,12,13,2,3,16,17,30,31,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %ymm5
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4,5],ymm5[6],ymm4[7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm12, %xmm14
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3],xmm14[4],xmm12[5],xmm14[6],xmm12[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[0,1,14,15,12,13,10,11,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpor %ymm3, %ymm12, %ymm3
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm11[5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa 160(%rdi), %ymm11
-; AVX512DQ-FAST-NEXT:    vmovdqa 128(%rdi), %ymm12
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm12[0,1],ymm11[2],ymm12[3,4,5],ymm11[6],ymm12[7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0,1,2,3],xmm15[4],xmm14[5],xmm15[6],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[0,1,0,1,14,15,12,13,10,11,8,9,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vinserti128 $1, %xmm14, %ymm0, %ymm14
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,28,29]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm14[0,1,2,3,4,5,6],ymm13[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[0,1,6,7,8,9,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm4[0,1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3,4,5],xmm14[6],xmm15[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[2,3,0,1,14,15,12,13,10,11],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpor %ymm6, %ymm14, %ymm6
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm6[0],ymm13[1,2,3,4,5,6,7],ymm6[8],ymm13[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm13[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm12[0,1,2],ymm11[3],ymm12[4,5],ymm11[6],ymm12[7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3,4,5],xmm13[6],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,2,3,0,1,14,15,12,13,10,11,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [2,5,2,5,2,5,2,5]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm2, %ymm14, %ymm14
-; AVX512DQ-FAST-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5,4,7,8,9,10,11,12,13,12,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5,6],ymm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[2,3,4,5,10,11,16,17],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6,7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1],xmm14[2,3,4,5],xmm15[6],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[4,5,2,3,0,1,14,15,12,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm8
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm8[0],ymm13[1,2,3,4,5,6,7],ymm8[8],ymm13[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm13[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5],xmm14[6],xmm13[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,4,5,2,3,0,1,14,15,12,13,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm2[0,1,1,3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm15 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,24,25]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm15 = ymm13[0,1,2,3,4,5,6],ymm15[7]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm7
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm7 = xmm7[0],xmm13[1],xmm7[2],xmm13[3],xmm7[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm9[4,5,10,11,0,1,22,23],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[6,7,4,5,2,3,0,1,14,15],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpor %ymm7, %ymm9, %ymm7
-; AVX512DQ-FAST-NEXT:    vpermd %zmm1, %zmm16, %zmm13
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = <0,3,7,10,14,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm7[0],ymm15[1,2,3,4,5,6,7],ymm7[8],ymm15[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0],ymm12[1],ymm11[2,3,4],ymm12[5],ymm11[6,7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm11, %xmm12
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0],xmm11[1],xmm12[2],xmm11[3],xmm12[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm11 = xmm11[0,1,6,7,4,5,2,3,0,1,14,15,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vinserti128 $1, %xmm11, %ymm0, %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm14, %ymm10
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5,6],ymm10[7]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm10 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [8,9,4,5,4,5,6,7,8,9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm14
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm10, %xmm10
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm10[3,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm9, %ymm9
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm14[0,1],ymm9[2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm9[0],ymm11[1,2,3,4,5,6,7],ymm9[8],ymm11[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <0,3,3,u,0,3,7,u>
-; AVX512DQ-FAST-NEXT:    vpermd %ymm2, %ymm11, %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,0,1,6,7,8,9,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,0,1,6,7,8,9,14,15,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm13[0,1,2,3,4],ymm11[5,6,7],ymm13[8,9,10,11,12],ymm11[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm12, %xmm14, %xmm12
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = <1,4,8,11,15,u,u,u>
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [2,6,9,13,2,6,9,13]
-; AVX512DQ-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm16 = <0,4,7,11,14,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[10,11,6,7,4,5,6,7,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm16, %zmm13
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,2,3,4,5,10,11,12,13,18,19,18,19,18,19,18,19,18,19,20,21,26,27,28,29]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1],ymm13[2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm1, %zmm15, %zmm1
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm12[0],ymm11[1,2,3,4,5,6,7],ymm12[8],ymm11[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [0,4,7,0,0,4,7,0]
-; AVX512DQ-FAST-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm2, %ymm12, %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,2,3,4,5,10,11,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,2,3,4,5,10,11,12,13,u,u,u,u,u,u,u,u,18,19,20,21,26,27,28,29,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm14, %zmm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm2 = ymm5[0,1,2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7]
-; AVX512DQ-FAST-NEXT:    vextracti128 $1, %ymm2, %xmm4
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[u,u,u,u,u,u,u,u,10,11,6,7,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,4,6,7]
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1,2,3,4,5,6,7],ymm0[8],ymm1[9,10,11,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm3, (%rsi)
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm6, (%rdx)
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm8, (%rcx)
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm7, (%r8)
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm9, (%r9)
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm11, (%rax)
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm0, (%rax)
-; AVX512DQ-FAST-NEXT:    vzeroupper
-; AVX512DQ-FAST-NEXT:    retq
+; AVX512F-FAST-LABEL: load_i16_stride7_vf16:
+; AVX512F-FAST:       # %bb.0:
+; AVX512F-FAST-NEXT:    vmovdqa64 64(%rdi), %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa64 128(%rdi), %zmm1
+; AVX512F-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [2,5,9,12,2,5,9,12]
+; AVX512F-FAST-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = [10,3,6,15,12,13,6,15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,6,9,u,13,u,u,u>
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm8
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,5,9,u,12,u,u,u>
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [8,1,12,5,12,5,14,15]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm3
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [3,6,10,13,3,6,10,13]
+; AVX512F-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermd %zmm1, %zmm2, %zmm4
+; AVX512F-FAST-NEXT:    vmovdqa 192(%rdi), %ymm2
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm2[0,1,0,2]
+; AVX512F-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm10 = [20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27,20,21,26,27]
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm13, %ymm5
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm4[0,1,2,3,4,5,6],ymm5[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm3[6,7,12,13,2,3,16,17,30,31,u,u,u,u,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %ymm5
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm4[0,1],ymm5[2],ymm4[3,4,5],ymm5[6],ymm4[7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm12, %xmm14
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm12 = xmm12[0,1,2,3],xmm14[4],xmm12[5],xmm14[6],xmm12[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[0,1,14,15,12,13,10,11,8,9],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm12[u,u,u,u,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpor %ymm3, %ymm12, %ymm3
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm3 = ymm3[0,1,2,3,4],ymm11[5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqa 160(%rdi), %ymm11
+; AVX512F-FAST-NEXT:    vmovdqa 128(%rdi), %ymm12
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm12[0,1],ymm11[2],ymm12[3,4,5],ymm11[6],ymm12[7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0,1,2,3],xmm15[4],xmm14[5],xmm15[6],xmm14[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[0,1,0,1,14,15,12,13,10,11,8,9,u,u,u,u]
+; AVX512F-FAST-NEXT:    vinserti128 $1, %xmm14, %ymm0, %ymm14
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,28,29]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm14[0,1,2,3,4,5,6],ymm13[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm6[0,1,6,7,8,9,18,19],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm4[0,1,2],ymm5[3],ymm4[4,5],ymm5[6],ymm4[7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3,4,5],xmm14[6],xmm15[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[2,3,0,1,14,15,12,13,10,11],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpor %ymm6, %ymm14, %ymm6
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm6[0],ymm13[1,2,3,4,5,6,7],ymm6[8],ymm13[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm13[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm12[0,1,2],ymm11[3],ymm12[4,5],ymm11[6],ymm12[7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm14[0],xmm13[1],xmm14[2,3,4,5],xmm13[6],xmm14[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,2,3,0,1,14,15,12,13,10,11,u,u,u,u]
+; AVX512F-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [2,5,2,5,2,5,2,5]
+; AVX512F-FAST-NEXT:    vpermd %ymm2, %ymm14, %ymm14
+; AVX512F-FAST-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm14[0,1,2,3,4,5,4,7,8,9,10,11,12,13,12,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3,4,5,6],ymm14[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm8[2,3,4,5,10,11,16,17],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm5[0],ymm4[1],ymm5[2,3],ymm4[4],ymm5[5,6,7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm14, %xmm15
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm14 = xmm14[0],xmm15[1],xmm14[2,3,4,5],xmm15[6],xmm14[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm14[4,5,2,3,0,1,14,15,12,13],zero,zero,zero,zero,zero,zero,zero,zero,ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm8
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm8[0],ymm13[1,2,3,4,5,6,7],ymm8[8],ymm13[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm13[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm13 = xmm13[0],xmm14[1],xmm13[2,3,4,5],xmm14[6],xmm13[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[0,1,4,5,2,3,0,1,14,15,12,13,u,u,u,u]
+; AVX512F-FAST-NEXT:    vinserti128 $1, %xmm13, %ymm0, %ymm13
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm2[0,1,1,3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm15 = ymm14[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,24,25]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm15 = ymm13[0,1,2,3,4,5,6],ymm15[7]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0],ymm4[1],ymm5[2,3,4],ymm4[5],ymm5[6,7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm7
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm7 = xmm7[0],xmm13[1],xmm7[2],xmm13[3],xmm7[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm9 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm9[4,5,10,11,0,1,22,23],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[6,7,4,5,2,3,0,1,14,15],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpor %ymm7, %ymm9, %ymm7
+; AVX512F-FAST-NEXT:    vpermd %zmm1, %zmm16, %zmm13
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm9 = <0,3,7,10,14,u,u,u>
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm7[0],ymm15[1,2,3,4,5,6,7],ymm7[8],ymm15[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm7 = ymm7[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm9, %zmm9
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0],ymm12[1],ymm11[2,3,4],ymm12[5],ymm11[6,7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm11, %xmm12
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0],xmm11[1],xmm12[2],xmm11[3],xmm12[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm11 = xmm11[0,1,6,7,4,5,2,3,0,1,14,15,u,u,u,u]
+; AVX512F-FAST-NEXT:    vinserti128 $1, %xmm11, %ymm0, %ymm11
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm14, %ymm10
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3,4,5,6],ymm10[7]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm10 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [8,9,4,5,4,5,6,7,8,9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm14
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm10, %xmm10
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm10[3,1,2,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm14 = xmm14[0],xmm10[0],xmm14[1],xmm10[1],xmm14[2],xmm10[2],xmm14[3],xmm10[3]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm10 = <u,u,u,u,u,u,u,u,0,1,6,7,8,9,14,15,16,17,22,23,20,21,22,23,16,17,22,23,24,25,30,31>
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm9, %ymm9
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm14[0,1],ymm9[2,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm9[0],ymm11[1,2,3,4,5,6,7],ymm9[8],ymm11[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm9 = ymm9[0,1,2,3],ymm11[4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <0,3,3,u,0,3,7,u>
+; AVX512F-FAST-NEXT:    vpermd %ymm2, %ymm11, %ymm11
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,0,1,6,7,8,9,u,u,u,u,u,u,u,u,u,u,16,17,22,23,24,25]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,0,1,6,7,8,9,14,15,u,u,u,u,u,u,u,u,16,17,22,23,24,25,30,31,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm13[0,1,2,3,4],ymm11[5,6,7],ymm13[8,9,10,11,12],ymm11[13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm13 = ymm5[0,1],ymm4[2,3],ymm5[4,5],ymm4[6,7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm13, %xmm14
+; AVX512F-FAST-NEXT:    vpshufb %xmm12, %xmm14, %xmm12
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = <1,4,8,11,15,u,u,u>
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [2,6,9,13,2,6,9,13]
+; AVX512F-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm16 = <0,4,7,11,14,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm13 = xmm13[10,11,6,7,4,5,6,7,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm13[0],xmm12[0],xmm13[1],xmm12[1],xmm13[2],xmm12[2],xmm13[3],xmm12[3]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm16, %zmm13
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm13[u,u,u,u,u,u,u,u,2,3,4,5,10,11,12,13,18,19,18,19,18,19,18,19,18,19,20,21,26,27,28,29]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1],ymm13[2,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermd %zmm1, %zmm15, %zmm1
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm11 = ymm12[0],ymm11[1,2,3,4,5,6,7],ymm12[8],ymm11[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm11 = ymm12[0,1,2,3],ymm11[4,5,6,7]
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [0,4,7,0,0,4,7,0]
+; AVX512F-FAST-NEXT:    # ymm12 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermd %ymm2, %ymm12, %ymm2
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,2,3,4,5,10,11,u,u,u,u,u,u,u,u,u,u,18,19,20,21,26,27]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,2,3,4,5,10,11,12,13,u,u,u,u,u,u,u,u,18,19,20,21,26,27,28,29,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2,3,4],ymm2[5,6,7],ymm1[8,9,10,11,12],ymm2[13,14,15]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm14, %zmm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm2 = ymm5[0,1,2],ymm4[3],ymm5[4,5],ymm4[6],ymm5[7]
+; AVX512F-FAST-NEXT:    vextracti128 $1, %ymm2, %xmm4
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[u,u,u,u,u,u,u,u,10,11,6,7,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,6,4,6,7]
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1],ymm0[2,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm0[0],ymm1[1,2,3,4,5,6,7],ymm0[8],ymm1[9,10,11,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqa %ymm3, (%rsi)
+; AVX512F-FAST-NEXT:    vmovdqa %ymm6, (%rdx)
+; AVX512F-FAST-NEXT:    vmovdqa %ymm8, (%rcx)
+; AVX512F-FAST-NEXT:    vmovdqa %ymm7, (%r8)
+; AVX512F-FAST-NEXT:    vmovdqa %ymm9, (%r9)
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa %ymm11, (%rax)
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa %ymm0, (%rax)
+; AVX512F-FAST-NEXT:    vzeroupper
+; AVX512F-FAST-NEXT:    retq
 ;
 ; AVX512BW-LABEL: load_i16_stride7_vf16:
 ; AVX512BW:       # %bb.0:
@@ -6874,13 +6723,13 @@ define void @load_i16_stride7_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [2,6,9,13,2,6,9,13]
 ; AVX512DQ-FAST-NEXT:    # ymm1 = mem[0,1,0,1]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 128(%rdi), %zmm24
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm30 = [2,5,9,12,2,5,9,12]
-; AVX512DQ-FAST-NEXT:    # ymm30 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [2,5,9,12,2,5,9,12]
+; AVX512DQ-FAST-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [10,3,6,15,12,13,6,15]
 ; AVX512DQ-FAST-NEXT:    vpermd %zmm29, %zmm2, %zmm0
 ; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm21 = [3,6,10,13,3,6,10,13]
-; AVX512DQ-FAST-NEXT:    # ymm21 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm21 = [3,6,10,13,3,6,10,13]
+; AVX512DQ-FAST-NEXT:    # ymm21 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 384(%rdi), %zmm17
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm19 = <1,u,u,u,5,8,12,15>
 ; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = <2,6,9,u,13,u,u,u>
@@ -15474,8 +15323,8 @@ define void @load_i16_stride7_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm26, %ymm2
 ; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm2, %ymm2
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 576(%rdi), %zmm18
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm26 = [3,6,10,13,3,6,10,13]
-; AVX512DQ-FAST-NEXT:    # ymm26 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [3,6,10,13,3,6,10,13]
+; AVX512DQ-FAST-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vpermd %zmm18, %zmm26, %zmm3
 ; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm6 = <0,1,2,3,0,1,6,7,8,9,14,15,12,13,14,15,16,17,18,19,16,17,22,23,24,25,30,31,u,u,u,u>
 ; AVX512DQ-FAST-NEXT:    vpshufb %ymm6, %ymm3, %ymm3

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-8.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-8.ll
index e3d60d0a4dc1eb8..c7fca67c75aea54 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-8.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i16-stride-8.ll
@@ -8744,567 +8744,286 @@ define void @load_i16_stride8_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512F-FAST-NEXT:    vzeroupper
 ; AVX512F-FAST-NEXT:    retq
 ;
-; AVX512BW-ONLY-LABEL: load_i16_stride8_vf64:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512BW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    movb $-64, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
-; AVX512BW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm9, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm6, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm4, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512BW-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm9, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm4, %zmm2
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
-; AVX512BW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
-; AVX512BW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
-; AVX512BW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm15, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
-; AVX512BW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm13, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
-; AVX512BW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm22, %zmm1, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm0, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm3, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm10, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm15, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm13, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm20, %zmm1, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm3, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm10, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm15, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm13, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm17, %zmm1, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm4, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm0, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm3, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm10, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm15, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm13, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm21, %zmm1, %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm0, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm6, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm10, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm15, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm13, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm1, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm28, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512BW-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
-; AVX512BW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm3, %zmm9
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
-; AVX512BW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm6, %zmm8
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm9, %zmm11
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
-; AVX512BW-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm12, %zmm14
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
-; AVX512BW-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm5, %zmm19, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2w %zmm28, %zmm19, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512BW-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
-;
-; AVX512DQBW-ONLY-LABEL: load_i16_stride8_vf64:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    movb $-64, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm9, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm6, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm4, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512DQBW-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm9, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm4, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
-; AVX512DQBW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
-; AVX512DQBW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
-; AVX512DQBW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm15, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
-; AVX512DQBW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm13, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
-; AVX512DQBW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm22, %zmm1, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm0, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm3, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm10, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm15, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm13, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm20, %zmm1, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm3, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm10, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm15, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm13, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm17, %zmm1, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm4, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm0, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm3, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm10, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm15, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm13, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm21, %zmm1, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm0, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm6, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm10, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm15, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm13, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm1, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm0, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm28, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
-; AVX512DQBW-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
-; AVX512DQBW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm3, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
-; AVX512DQBW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm6, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm9, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
-; AVX512DQBW-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm12, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
-; AVX512DQBW-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm5, %zmm19, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermi2w %zmm28, %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2w %zmm28, %zmm19, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: load_i16_stride8_vf64:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $1096, %rsp # imm = 0x448
+; AVX512BW-NEXT:    vmovdqa64 704(%rdi), %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 640(%rdi), %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 576(%rdi), %zmm28
+; AVX512BW-NEXT:    vmovdqa64 512(%rdi), %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 832(%rdi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 768(%rdi), %zmm29
+; AVX512BW-NEXT:    vmovdqa64 960(%rdi), %zmm21
+; AVX512BW-NEXT:    vmovdqa64 896(%rdi), %zmm25
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm17
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm9
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 256(%rdi), %zmm26
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm12
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm2
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm0, %zmm2
+; AVX512BW-NEXT:    movb $-64, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,32,40,48,56,0,8,16,24,32,40,48,56]
+; AVX512BW-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2w %zmm9, %zmm1, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm0, %zmm4
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm2
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm0, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm3
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermi2w %zmm6, %zmm8, %zmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm1
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm4, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
+; AVX512BW-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512BW-NEXT:    vpermt2w %zmm9, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm2
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm4, %zmm2
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm0, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
+; AVX512BW-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm2
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
+; AVX512BW-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm15, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
+; AVX512BW-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm13, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
+; AVX512BW-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2w %zmm22, %zmm1, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm24
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm0, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm27
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm3, %zmm27
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm31
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm10, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm30
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm15, %zmm30
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm22
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm13, %zmm22
+; AVX512BW-NEXT:    vpermt2w %zmm20, %zmm1, %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm8
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm0, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm12
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm3, %zmm12
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm10, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm15, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm13, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2w %zmm17, %zmm1, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm18
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm4, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm19
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm0, %zmm19
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm3, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm16
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm10, %zmm16
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm20
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm15, %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm17
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm13, %zmm17
+; AVX512BW-NEXT:    vpermt2w %zmm21, %zmm1, %zmm25
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm9
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm4, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm11
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm0, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm14
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm6, %zmm14
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm21
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm10, %zmm21
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm2
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm15, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm15
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm13, %zmm15
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm1, %zmm29
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm4
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm3
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm6
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm10
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm0, %zmm13
+; AVX512BW-NEXT:    vpermt2w %zmm28, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,33,41,49,57,1,9,17,25,33,41,49,57]
+; AVX512BW-NEXT:    # ymm0 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,34,42,50,58,2,10,18,26,34,42,50,58]
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm9, %zmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,35,43,51,59,3,11,19,27,35,43,51,59]
+; AVX512BW-NEXT:    # ymm3 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm9
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm3, %zmm9
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,36,44,52,60,4,12,20,28,36,44,52,60]
+; AVX512BW-NEXT:    # ymm6 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm6, %zmm8
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,37,45,53,61,5,13,21,29,37,45,53,61]
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm11
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm9, %zmm11
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,38,46,54,62,6,14,22,30,38,46,54,62]
+; AVX512BW-NEXT:    # ymm12 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm14
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm12, %zmm14
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,39,47,55,63,7,15,23,31,39,47,55,63]
+; AVX512BW-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2w %zmm5, %zmm19, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm5
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm6
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm9
+; AVX512BW-NEXT:    vpermi2w %zmm28, %zmm2, %zmm12
+; AVX512BW-NEXT:    vpermt2w %zmm28, %zmm19, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm0
+; AVX512BW-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 64(%rsi)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, (%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, 64(%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, (%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 64(%r9)
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, (%r9)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, (%rax)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, (%rax)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, (%rax)
+; AVX512BW-NEXT:    addq $1096, %rsp # imm = 0x448
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %wide.vec = load <512 x i16>, ptr %in.vec, align 64
   %strided.vec0 = shufflevector <512 x i16> %wide.vec, <512 x i16> poison, <64 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 64, i32 72, i32 80, i32 88, i32 96, i32 104, i32 112, i32 120, i32 128, i32 136, i32 144, i32 152, i32 160, i32 168, i32 176, i32 184, i32 192, i32 200, i32 208, i32 216, i32 224, i32 232, i32 240, i32 248, i32 256, i32 264, i32 272, i32 280, i32 288, i32 296, i32 304, i32 312, i32 320, i32 328, i32 336, i32 344, i32 352, i32 360, i32 368, i32 376, i32 384, i32 392, i32 400, i32 408, i32 416, i32 424, i32 432, i32 440, i32 448, i32 456, i32 464, i32 472, i32 480, i32 488, i32 496, i32 504>
   %strided.vec1 = shufflevector <512 x i16> %wide.vec, <512 x i16> poison, <64 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 65, i32 73, i32 81, i32 89, i32 97, i32 105, i32 113, i32 121, i32 129, i32 137, i32 145, i32 153, i32 161, i32 169, i32 177, i32 185, i32 193, i32 201, i32 209, i32 217, i32 225, i32 233, i32 241, i32 249, i32 257, i32 265, i32 273, i32 281, i32 289, i32 297, i32 305, i32 313, i32 321, i32 329, i32 337, i32 345, i32 353, i32 361, i32 369, i32 377, i32 385, i32 393, i32 401, i32 409, i32 417, i32 425, i32 433, i32 441, i32 449, i32 457, i32 465, i32 473, i32 481, i32 489, i32 497, i32 505>
@@ -9334,6 +9053,7 @@ define void @load_i16_stride8_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512-FAST: {{.*}}
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
@@ -9341,6 +9061,7 @@ define void @load_i16_stride8_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F: {{.*}}
 ; AVX512F-ONLY: {{.*}}

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i32-stride-8.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i32-stride-8.ll
index 794356f8bc279c7..1bd7025307d2b95 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i32-stride-8.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i32-stride-8.ll
@@ -3687,1129 +3687,567 @@ define void @load_i32_stride8_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: load_i32_stride8_vf32:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512F-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
-; AVX512F-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
-; AVX512F-ONLY-NEXT:    movb $-64, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
-; AVX512F-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
-; AVX512F-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
-; AVX512F-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
-; AVX512F-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
-; AVX512F-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
-; AVX512F-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
-; AVX512F-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
-; AVX512F-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
-; AVX512F-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
-; AVX512F-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
-; AVX512F-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
-; AVX512F-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
-; AVX512F-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
-; AVX512F-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
-; AVX512F-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512F-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512F-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512F-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512F-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512F-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512F-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512F-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: load_i32_stride8_vf32:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
-; AVX512DQ-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
-; AVX512DQ-ONLY-NEXT:    movb $-64, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
-; AVX512DQ-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
-; AVX512DQ-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
-; AVX512DQ-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
-; AVX512DQ-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
-; AVX512DQ-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
-; AVX512DQ-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
-; AVX512DQ-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
-; AVX512DQ-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
-; AVX512DQ-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
-; AVX512DQ-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
-; AVX512DQ-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
-; AVX512DQ-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
-; AVX512DQ-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
-; AVX512DQ-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
-; AVX512DQ-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512DQ-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: load_i32_stride8_vf32:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512BW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    movb $-64, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
-; AVX512BW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
-; AVX512BW-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
-; AVX512BW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
-; AVX512BW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
-; AVX512BW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
-; AVX512BW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
-; AVX512BW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
-; AVX512BW-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
-; AVX512BW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
-; AVX512BW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
-; AVX512BW-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
-; AVX512BW-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512BW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512BW-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: load_i32_stride8_vf32:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    subq $1096, %rsp # imm = 0x448
+; AVX512F-NEXT:    vmovdqa64 704(%rdi), %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 640(%rdi), %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 576(%rdi), %zmm28
+; AVX512F-NEXT:    vmovdqa64 512(%rdi), %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 832(%rdi), %zmm5
+; AVX512F-NEXT:    vmovdqa64 768(%rdi), %zmm29
+; AVX512F-NEXT:    vmovdqa64 960(%rdi), %zmm21
+; AVX512F-NEXT:    vmovdqa64 896(%rdi), %zmm25
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm17
+; AVX512F-NEXT:    vmovdqa64 128(%rdi), %zmm4
+; AVX512F-NEXT:    vmovdqa64 192(%rdi), %zmm9
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %zmm20
+; AVX512F-NEXT:    vmovdqa64 256(%rdi), %zmm26
+; AVX512F-NEXT:    vmovdqa64 448(%rdi), %zmm22
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
+; AVX512F-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm12
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm2
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
+; AVX512F-NEXT:    movb $-64, %al
+; AVX512F-NEXT:    kmovw %eax, %k1
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
+; AVX512F-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm2
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm3
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
+; AVX512F-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm1
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
+; AVX512F-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm2
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
+; AVX512F-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
+; AVX512F-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
+; AVX512F-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm2
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
+; AVX512F-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
+; AVX512F-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
+; AVX512F-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
+; AVX512F-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm24
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm27
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm31
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm30
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm22
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
+; AVX512F-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm8
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm12
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
+; AVX512F-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm18
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm19
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm23
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm16
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm20
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm17
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
+; AVX512F-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm9
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm11
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm14
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm21
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm2
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm15
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
+; AVX512F-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
+; AVX512F-NEXT:    # ymm0 = mem[0,1,0,1]
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
+; AVX512F-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
+; AVX512F-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
+; AVX512F-NEXT:    # ymm3 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm9
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
+; AVX512F-NEXT:    # ymm6 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
+; AVX512F-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm11
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
+; AVX512F-NEXT:    # ymm12 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm14
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
+; AVX512F-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm5
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
+; AVX512F-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
+; AVX512F-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm0
+; AVX512F-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
+; AVX512F-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
+; AVX512F-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
+; AVX512F-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
+; AVX512F-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
+; AVX512F-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 64(%rsi)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, (%rsi)
+; AVX512F-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, (%rdx)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm24, (%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm3, 64(%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm4, (%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm6, 64(%r9)
+; AVX512F-NEXT:    vmovdqa64 %zmm8, (%r9)
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm10, (%rax)
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm9, (%rax)
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm11, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, (%rax)
+; AVX512F-NEXT:    addq $1096, %rsp # imm = 0x448
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: load_i32_stride8_vf32:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $1096, %rsp # imm = 0x448
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    movb $-64, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
-; AVX512DQBW-ONLY-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
-; AVX512DQBW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
-; AVX512DQBW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
-; AVX512DQBW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
-; AVX512DQBW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
-; AVX512DQBW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
-; AVX512DQBW-ONLY-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
-; AVX512DQBW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
-; AVX512DQBW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
-; AVX512DQBW-ONLY-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
-; AVX512DQBW-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 64(%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, (%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r9)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, (%r9)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $1096, %rsp # imm = 0x448
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: load_i32_stride8_vf32:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $1096, %rsp # imm = 0x448
+; AVX512BW-NEXT:    vmovdqa64 704(%rdi), %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 640(%rdi), %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 576(%rdi), %zmm28
+; AVX512BW-NEXT:    vmovdqa64 512(%rdi), %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 832(%rdi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 768(%rdi), %zmm29
+; AVX512BW-NEXT:    vmovdqa64 960(%rdi), %zmm21
+; AVX512BW-NEXT:    vmovdqa64 896(%rdi), %zmm25
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm17
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm9
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 256(%rdi), %zmm26
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [0,8,16,24,0,8,16,24,0,8,16,24,0,8,16,24]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm12
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm2
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm0, %zmm2
+; AVX512BW-NEXT:    movb $-64, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm2 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [0,8,16,24,0,8,16,24]
+; AVX512BW-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2d %zmm9, %zmm1, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm0, %zmm4
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm3 = ymm4[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm2, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm2
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm0, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm3
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermi2d %zmm6, %zmm8, %zmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm1[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [1,9,17,25,1,9,17,25,1,9,17,25,1,9,17,25]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm1
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm4, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm1 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [1,9,17,25,1,9,17,25]
+; AVX512BW-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512BW-NEXT:    vpermt2d %zmm9, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm2
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm4, %zmm2
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [2,10,18,26,2,10,18,26,2,10,18,26,2,10,18,26]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm0, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [3,11,19,27,3,11,19,27,3,11,19,27,3,11,19,27]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [4,12,20,28,4,12,20,28,4,12,20,28,4,12,20,28]
+; AVX512BW-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm2
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [5,13,21,29,5,13,21,29,5,13,21,29,5,13,21,29]
+; AVX512BW-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm15, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,22,30,6,14,22,30,6,14,22,30,6,14,22,30]
+; AVX512BW-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm1
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm13, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [7,15,23,31,7,15,23,31,7,15,23,31,7,15,23,31]
+; AVX512BW-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2d %zmm22, %zmm1, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm24
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm0, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm27
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm3, %zmm27
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm31
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm10, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm30
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm15, %zmm30
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm22
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm13, %zmm22
+; AVX512BW-NEXT:    vpermt2d %zmm20, %zmm1, %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm8
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm0, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm12
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm3, %zmm12
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm10, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm15, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm6
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm13, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2d %zmm17, %zmm1, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm18
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm4, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm19
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm0, %zmm19
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm3, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm16
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm10, %zmm16
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm20
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm15, %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm17
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm13, %zmm17
+; AVX512BW-NEXT:    vpermt2d %zmm21, %zmm1, %zmm25
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm9
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm4, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm11
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm0, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm14
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm6, %zmm14
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm21
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm10, %zmm21
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm2
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm15, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm15
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm13, %zmm15
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm1, %zmm29
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm4
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm3
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm6
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm10
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm0, %zmm13
+; AVX512BW-NEXT:    vpermt2d %zmm28, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm9 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm28 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [1,9,17,25,1,9,17,25]
+; AVX512BW-NEXT:    # ymm0 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm4[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm9, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [2,10,18,26,2,10,18,26]
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm9, %zmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm8[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm24, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm11 {%k1}
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm3[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm11, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm27 {%k1}
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [3,11,19,27,3,11,19,27]
+; AVX512BW-NEXT:    # ymm3 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm9
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm3, %zmm9
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm12[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm27, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm14 {%k1}
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm3 = ymm6[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm14, %zmm3
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,20,28,4,12,20,28]
+; AVX512BW-NEXT:    # ymm6 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm6, %zmm8
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [5,13,21,29,5,13,21,29]
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm11
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm9, %zmm11
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [6,14,22,30,6,14,22,30]
+; AVX512BW-NEXT:    # ymm12 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm14
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm12, %zmm14
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,23,31,7,15,23,31]
+; AVX512BW-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2d %zmm5, %zmm19, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm5
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm6
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm9
+; AVX512BW-NEXT:    vpermi2d %zmm28, %zmm2, %zmm12
+; AVX512BW-NEXT:    vpermt2d %zmm28, %zmm19, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm0
+; AVX512BW-NEXT:    vpblendd $15, (%rsp), %ymm8, %ymm8 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm8 = mem[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm31 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm31, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm10[0,1,2,3],ymm6[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm21 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm21, %zmm6
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm10 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm10 = mem[0,1,2,3],ymm11[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm30 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm30, %zmm10
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9, %ymm9 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,2,3],ymm9[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, %zmm7 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm9, %zmm7, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm22 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm9 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,2,3],ymm14[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm9, %zmm22, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm15 {%k1}
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm2 = ymm13[0,1,2,3],ymm12[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm26 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5, %ymm11 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm11 = mem[0,1,2,3],ymm5[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm26, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm29 {%k1}
+; AVX512BW-NEXT:    vpblendd $15, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm11 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm11 = mem[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm29, %zmm11
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 64(%rsi)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, 64(%rdx)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 64(%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, (%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, 64(%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, (%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 64(%r9)
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, (%r9)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, (%rax)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, (%rax)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, (%rax)
+; AVX512BW-NEXT:    addq $1096, %rsp # imm = 0x448
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %wide.vec = load <256 x i32>, ptr %in.vec, align 64
   %strided.vec0 = shufflevector <256 x i32> %wide.vec, <256 x i32> poison, <32 x i32> <i32 0, i32 8, i32 16, i32 24, i32 32, i32 40, i32 48, i32 56, i32 64, i32 72, i32 80, i32 88, i32 96, i32 104, i32 112, i32 120, i32 128, i32 136, i32 144, i32 152, i32 160, i32 168, i32 176, i32 184, i32 192, i32 200, i32 208, i32 216, i32 224, i32 232, i32 240, i32 248>
   %strided.vec1 = shufflevector <256 x i32> %wide.vec, <256 x i32> poison, <32 x i32> <i32 1, i32 9, i32 17, i32 25, i32 33, i32 41, i32 49, i32 57, i32 65, i32 73, i32 81, i32 89, i32 97, i32 105, i32 113, i32 121, i32 129, i32 137, i32 145, i32 153, i32 161, i32 169, i32 177, i32 185, i32 193, i32 201, i32 209, i32 217, i32 225, i32 233, i32 241, i32 249>
@@ -9939,14 +9377,18 @@ define void @load_i32_stride8_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX2-FAST-PERLANE: {{.*}}
 ; AVX2-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
+; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-FAST: {{.*}}
+; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}
 ; AVX512F-SLOW: {{.*}}

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i64-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i64-stride-7.ll
index 104e42930d4c7eb..7d9c056716ceeb2 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i64-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i64-stride-7.ll
@@ -3678,1787 +3678,896 @@ define void @load_i64_stride7_vf32(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: load_i64_stride7_vf32:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    subq $2216, %rsp # imm = 0x8A8
-; AVX512F-ONLY-NEXT:    vmovdqa64 1664(%rdi), %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 1600(%rdi), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 1216(%rdi), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 1152(%rdi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 1088(%rdi), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovaps 1024(%rdi), %zmm0
-; AVX512F-ONLY-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
-; AVX512F-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
-; AVX512F-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
-; AVX512F-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm13, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [12,5,12,5,12,5,12,5]
-; AVX512F-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm20, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm20, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm25
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
-; AVX512F-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm28, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm28, %zmm16
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm30 = [9,0,7,0,9,0,7,0]
-; AVX512F-ONLY-NEXT:    # zmm30 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm14
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm30, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm4, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm5
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 1536(%rdi), %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 1472(%rdi), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm20, %zmm29
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm31
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm28, %zmm22
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm5, %zmm21, %zmm30
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm1
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 1280(%rdi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 1728(%rdi), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm30
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
-; AVX512F-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    movb $24, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512F-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm21, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm11[4,5,4,5],zmm10[4,5,4,5]
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [14,0,0,7,14,0,0,7]
-; AVX512F-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm10, %zmm26
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm16 = [7,0,9,0,7,0,9,0]
-; AVX512F-ONLY-NEXT:    # zmm16 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm16, %zmm27
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,11,4,11]
-; AVX512F-ONLY-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm20, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm21, %zmm17
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm17 {%k1} = zmm12[4,5,4,5],zmm24[4,5,4,5]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm10, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm16, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm20, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm21, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 1408(%rdi), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 1344(%rdi), %zmm8
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm3[4,5,4,5],zmm15[4,5,4,5]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm15, %zmm3, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm16, %zmm24
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm3, %zmm15, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm20, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm12 {%k1} = zmm2[4,5,4,5],zmm9[4,5,4,5]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm20, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 912(%rdi), %xmm2
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} xmm2 = xmm2[0,1],mem[2,3]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm2, %zmm0, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 464(%rdi), %xmm0
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm26, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm0
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa 1360(%rdi), %xmm0
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm10, %zmm20
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 1024(%rdi), %ymm2
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,11]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm13
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm22, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 576(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm3
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqu %ymm3, (%rsp) # 32-byte Spill
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm10, %zmm3
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 1472(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm10
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa 1088(%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpalignr {{.*#+}} ymm0 = ymm2[8,9,10,11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7],ymm2[24,25,26,27,28,29,30,31],ymm0[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512F-ONLY-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa {{.*#+}} xmm2 = [5,12]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa {{.*#+}} xmm7 = [6,13]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm7, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm0, %zmm1
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm8, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa 960(%rdi), %ymm15
-; AVX512F-ONLY-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vextracti32x4 $1, %ymm15, %xmm19
-; AVX512F-ONLY-NEXT:    vmovdqa 512(%rdi), %ymm15
-; AVX512F-ONLY-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vextracti128 $1, %ymm15, %xmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm2, %zmm9
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm7, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm7, %zmm11
-; AVX512F-ONLY-NEXT:    movb $-32, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm17 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm18 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm12 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm21 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm22 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm29 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm26 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm20 {%k2}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm19, %zmm24, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm4 {%k2}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm15, %zmm27, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %ymm23, %ymm7
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm9
-; AVX512F-ONLY-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vextracti128 $1, %ymm9, %xmm9
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm9, %zmm25, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm15, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa 640(%rdi), %ymm15
-; AVX512F-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vmovdqa 1408(%rdi), %ymm7
-; AVX512F-ONLY-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vextracti128 $1, %ymm7, %xmm7
-; AVX512F-ONLY-NEXT:    vinserti32x4 $0, %xmm7, %zmm16, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm7 {%k2}
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm15
-; AVX512F-ONLY-NEXT:    vpalignr $8, (%rsp), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa 1536(%rdi), %ymm15
-; AVX512F-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm30, %zmm2
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm15 = ymm15[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinsertf64x4 $0, %ymm15, %zmm16, %zmm15
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm14, %zmm16, %zmm14
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm16, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm11 = ymm11[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm16, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, 128(%rsi)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rsi)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, (%rsi)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, (%rdx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 128(%rdx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, 192(%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, (%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, 64(%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, 128(%rcx)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 192(%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, (%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, 128(%r8)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%r9)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, (%r9)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 64(%r9)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 128(%r9)
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, 128(%rax)
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, 128(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, (%rax)
-; AVX512F-ONLY-NEXT:    vmovaps %zmm15, 64(%rax)
-; AVX512F-ONLY-NEXT:    addq $2216, %rsp # imm = 0x8A8
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: load_i64_stride7_vf32:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    subq $2216, %rsp # imm = 0x8A8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1664(%rdi), %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1600(%rdi), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1216(%rdi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1152(%rdi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1088(%rdi), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovaps 1024(%rdi), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 576(%rdi), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm26
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
-; AVX512DQ-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
-; AVX512DQ-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
-; AVX512DQ-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm13, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [12,5,12,5,12,5,12,5]
-; AVX512DQ-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm20, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm20, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm25
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
-; AVX512DQ-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm28, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm28, %zmm16
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm30 = [9,0,7,0,9,0,7,0]
-; AVX512DQ-ONLY-NEXT:    # zmm30 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm14
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm30, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm4, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1536(%rdi), %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1472(%rdi), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm20, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm28, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm3, %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm5, %zmm21, %zmm30
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1280(%rdi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1728(%rdi), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm30
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
-; AVX512DQ-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    movb $24, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm21, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm11[4,5,4,5],zmm10[4,5,4,5]
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [14,0,0,7,14,0,0,7]
-; AVX512DQ-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm10, %zmm26
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm16 = [7,0,9,0,7,0,9,0]
-; AVX512DQ-ONLY-NEXT:    # zmm16 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm16, %zmm27
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm20 = [4,11,4,11]
-; AVX512DQ-ONLY-NEXT:    # ymm20 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm20, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm21, %zmm17
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm17 {%k1} = zmm12[4,5,4,5],zmm24[4,5,4,5]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm10, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm16, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm20, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm21, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1408(%rdi), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 1344(%rdi), %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm3[4,5,4,5],zmm15[4,5,4,5]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm15, %zmm3, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm16, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm3, %zmm15, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm20, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm12 {%k1} = zmm2[4,5,4,5],zmm9[4,5,4,5]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm20, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 912(%rdi), %xmm2
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} xmm2 = xmm2[0,1],mem[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm2, %zmm0, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 464(%rdi), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm26, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1360(%rdi), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm10, %zmm20
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1024(%rdi), %ymm2
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,11]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm22, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 576(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu %ymm3, (%rsp) # 32-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm10, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1472(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1088(%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpalignr {{.*#+}} ymm0 = ymm2[8,9,10,11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7],ymm2[24,25,26,27,28,29,30,31],ymm0[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa {{.*#+}} xmm2 = [5,12]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa {{.*#+}} xmm7 = [6,13]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm7, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm0, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm8, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 960(%rdi), %ymm15
-; AVX512DQ-ONLY-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vextracti32x4 $1, %ymm15, %xmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa 512(%rdi), %ymm15
-; AVX512DQ-ONLY-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vextracti128 $1, %ymm15, %xmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm2, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm8, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm7, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm7, %zmm11
-; AVX512DQ-ONLY-NEXT:    movb $-32, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm17 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm18 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm12 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm21 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm22 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm29 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm26 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm20 {%k2}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm19, %zmm24, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm4 {%k2}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm15, %zmm27, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm6 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %ymm23, %ymm7
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm9
-; AVX512DQ-ONLY-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vextracti128 $1, %ymm9, %xmm9
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm9, %zmm25, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm15, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa 640(%rdi), %ymm15
-; AVX512DQ-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1408(%rdi), %ymm7
-; AVX512DQ-ONLY-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vextracti128 $1, %ymm7, %xmm7
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $0, %xmm7, %zmm16, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm7 {%k2}
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm15
-; AVX512DQ-ONLY-NEXT:    vpalignr $8, (%rsp), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa 1536(%rdi), %ymm15
-; AVX512DQ-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm30, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm15 = ymm15[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinsertf64x4 $0, %ymm15, %zmm16, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm14, %zmm16, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm16, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm11 = ymm11[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm16, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, 128(%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, 64(%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, (%rsi)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, (%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 128(%rdx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, 192(%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, (%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, 64(%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, 128(%rcx)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 192(%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, (%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, 64(%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, 128(%r8)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%r9)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, (%r9)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 64(%r9)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 128(%r9)
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, (%rax)
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm15, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    addq $2216, %rsp # imm = 0x8A8
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: load_i64_stride7_vf32:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $2152, %rsp # imm = 0x868
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1664(%rdi), %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1600(%rdi), %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1216(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1152(%rdi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1088(%rdi), %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1024(%rdi), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovaps 576(%rdi), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
-; AVX512BW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm31, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
-; AVX512BW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
-; AVX512BW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm18
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm19 = [12,5,12,5,12,5,12,5]
-; AVX512BW-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm19, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm24
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
-; AVX512BW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm28, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm28, %zmm25
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm29 = [9,0,7,0,9,0,7,0]
-; AVX512BW-ONLY-NEXT:    # zmm29 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm29, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm7, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm31, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1536(%rdi), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1472(%rdi), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm19, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm28, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm5, %zmm21, %zmm29
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1280(%rdi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1728(%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm29
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
-; AVX512BW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    movb $24, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm21, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm24 {%k1} = zmm14[4,5,4,5],zmm11[4,5,4,5]
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [14,0,0,7,14,0,0,7]
-; AVX512BW-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm11, %zmm13
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [7,0,9,0,7,0,9,0]
-; AVX512BW-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm17, %zmm26
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [4,11,4,11]
-; AVX512BW-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm19, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm21, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm0[4,5,4,5],zmm22[4,5,4,5]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm17, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm21, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1408(%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1344(%rdi), %zmm8
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm6[4,5,4,5],zmm3[4,5,4,5]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm3, %zmm6, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm3, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm16 {%k1} = zmm1[4,5,4,5],zmm22[4,5,4,5]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 912(%rdi), %xmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} xmm1 = xmm1[0,1],mem[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 464(%rdi), %xmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm13, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa 1360(%rdi), %xmm0
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm11, %zmm22
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 1024(%rdi), %ymm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512BW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm1 = [4,11]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm11
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm20, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 576(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 1472(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm1 = [5,12]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm6 = [6,13]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm0, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm8, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm6, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa 1088(%rdi), %ymm5
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} ymm5 = ymm19[8,9,10,11,12,13,14,15],ymm5[0,1,2,3,4,5,6,7],ymm19[24,25,26,27,28,29,30,31],ymm5[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm12, %zmm28
-; AVX512BW-ONLY-NEXT:    movb $-32, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm24 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm12 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm19 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm23 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa 960(%rdi), %ymm6
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} ymm6 = mem[8,9,10,11,12,13,14,15],ymm6[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm6[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vextracti128 $1, %ymm6, %xmm6
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm6, %zmm25, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm6 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa 512(%rdi), %ymm7
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vextracti128 $1, %ymm7, %xmm7
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm7, %zmm26, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm7 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm9
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vextracti128 $1, %ymm9, %xmm9
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm9, %zmm30, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 1408(%rdi), %ymm20
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} ymm20 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vextracti32x4 $1, %ymm20, %xmm20
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $0, %xmm20, %zmm17, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm17 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 640(%rdi), %ymm20
-; AVX512BW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm20, %ymm5 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm5 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm13, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm13
-; AVX512BW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm13[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm13, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa 1536(%rdi), %ymm13
-; AVX512BW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm13[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm29, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm13 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm13 = ymm15[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm14, %zmm15, %zmm14
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm15, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm10 = ymm10[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, 128(%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, 64(%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, (%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, (%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, 128(%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, 192(%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, (%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, 64(%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, 128(%rcx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, 192(%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, (%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%r8)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 128(%r8)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 192(%r9)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%r9)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 64(%r9)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 128(%r9)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, (%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm28, 128(%rax)
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, 128(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, (%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, 64(%rax)
-; AVX512BW-ONLY-NEXT:    addq $2152, %rsp # imm = 0x868
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: load_i64_stride7_vf32:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    subq $2216, %rsp # imm = 0x8A8
+; AVX512F-NEXT:    vmovdqa64 1664(%rdi), %zmm21
+; AVX512F-NEXT:    vmovdqa64 1600(%rdi), %zmm3
+; AVX512F-NEXT:    vmovdqa64 1216(%rdi), %zmm1
+; AVX512F-NEXT:    vmovdqa64 1152(%rdi), %zmm6
+; AVX512F-NEXT:    vmovdqa64 1088(%rdi), %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovaps 1024(%rdi), %zmm0
+; AVX512F-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 768(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 704(%rdi), %zmm9
+; AVX512F-NEXT:    vmovdqa64 640(%rdi), %zmm15
+; AVX512F-NEXT:    vmovdqa64 576(%rdi), %zmm10
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %zmm0
+; AVX512F-NEXT:    vmovdqa64 256(%rdi), %zmm26
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
+; AVX512F-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm3, %zmm7
+; AVX512F-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
+; AVX512F-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm13
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
+; AVX512F-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm13, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm13, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [12,5,12,5,12,5,12,5]
+; AVX512F-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm20, %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm20, %zmm23
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm7, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm7, %zmm25
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
+; AVX512F-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm28, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm28, %zmm16
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm30 = [9,0,7,0,9,0,7,0]
+; AVX512F-NEXT:    # zmm30 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm30, %zmm14
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm30, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm4, %zmm26
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 192(%rdi), %zmm24
+; AVX512F-NEXT:    vmovdqa64 128(%rdi), %zmm12
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm7, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm5
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm3, %zmm13
+; AVX512F-NEXT:    vmovdqa64 1536(%rdi), %zmm15
+; AVX512F-NEXT:    vmovdqa64 1472(%rdi), %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm7, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm20, %zmm29
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm3, %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm31
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm7, %zmm31
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm3, %zmm2
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm28, %zmm22
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm3, %zmm28
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm30, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512F-NEXT:    vpermi2q %zmm5, %zmm21, %zmm30
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %zmm1
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm5
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 832(%rdi), %zmm5
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 1280(%rdi), %zmm6
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
+; AVX512F-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 1728(%rdi), %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm27
+; AVX512F-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
+; AVX512F-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
+; AVX512F-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm19
+; AVX512F-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
+; AVX512F-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm18
+; AVX512F-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm23
+; AVX512F-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm20
+; AVX512F-NEXT:    vmovdqu64 %zmm20, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
+; AVX512F-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm25
+; AVX512F-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm22
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm16
+; AVX512F-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm14
+; AVX512F-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm30
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
+; AVX512F-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm4, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm26
+; AVX512F-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
+; AVX512F-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    movb $24, %al
+; AVX512F-NEXT:    kmovw %eax, %k1
+; AVX512F-NEXT:    vmovdqa64 512(%rdi), %zmm5
+; AVX512F-NEXT:    vmovdqa64 448(%rdi), %zmm7
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm21, %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm11[4,5,4,5],zmm10[4,5,4,5]
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [14,0,0,7,14,0,0,7]
+; AVX512F-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm10, %zmm26
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm16 = [7,0,9,0,7,0,9,0]
+; AVX512F-NEXT:    # zmm16 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm16, %zmm27
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,11,4,11]
+; AVX512F-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm20, %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm1
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm14
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm21, %zmm17
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm17 {%k1} = zmm12[4,5,4,5],zmm24[4,5,4,5]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm10, %zmm23
+; AVX512F-NEXT:    vmovdqa64 %zmm24, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm16, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm20, %zmm12
+; AVX512F-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 960(%rdi), %zmm6
+; AVX512F-NEXT:    vmovdqa64 896(%rdi), %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm21, %zmm12
+; AVX512F-NEXT:    vmovdqa64 1408(%rdi), %zmm4
+; AVX512F-NEXT:    vmovdqa64 1344(%rdi), %zmm8
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm8, %zmm21
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm3[4,5,4,5],zmm15[4,5,4,5]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
+; AVX512F-NEXT:    vpermi2q %zmm15, %zmm3, %zmm10
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm16, %zmm24
+; AVX512F-NEXT:    vpermi2q %zmm3, %zmm15, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm20, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm12 {%k1} = zmm2[4,5,4,5],zmm9[4,5,4,5]
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm20, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 912(%rdi), %xmm2
+; AVX512F-NEXT:    vpblendd {{.*#+}} xmm2 = xmm2[0,1],mem[2,3]
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm2, %zmm0, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 464(%rdi), %xmm0
+; AVX512F-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm0, %zmm26, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 16(%rdi), %xmm0
+; AVX512F-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm26
+; AVX512F-NEXT:    vmovdqa 1360(%rdi), %xmm0
+; AVX512F-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm0, %zmm10, %zmm20
+; AVX512F-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
+; AVX512F-NEXT:    vmovdqa 1024(%rdi), %ymm2
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm2[0,1,2,3,4,5],ymm0[6,7]
+; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,11]
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm13
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm10, %zmm13
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm13[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm22, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
+; AVX512F-NEXT:    vmovdqa 576(%rdi), %ymm3
+; AVX512F-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm10, %zmm3
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
+; AVX512F-NEXT:    vmovdqa 128(%rdi), %ymm3
+; AVX512F-NEXT:    vmovdqu %ymm3, (%rsp) # 32-byte Spill
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm10, %zmm3
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
+; AVX512F-NEXT:    vmovdqa 1472(%rdi), %ymm3
+; AVX512F-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm8, %zmm10
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm10[0,1,2,3],ymm0[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm28
+; AVX512F-NEXT:    vmovdqa 1088(%rdi), %ymm0
+; AVX512F-NEXT:    vpalignr {{.*#+}} ymm0 = ymm2[8,9,10,11,12,13,14,15],ymm0[0,1,2,3,4,5,6,7],ymm2[24,25,26,27,28,29,30,31],ymm0[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vmovdqa64 %ymm0, %ymm23
+; AVX512F-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm0, %zmm3
+; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm2 = [5,12]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm13
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm2, %zmm13
+; AVX512F-NEXT:    vmovdqa {{.*#+}} xmm7 = [6,13]
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm7, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm0, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm2, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm7, %zmm14
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm0, %zmm1
+; AVX512F-NEXT:    vpermi2q %zmm8, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqa 960(%rdi), %ymm15
+; AVX512F-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vextracti32x4 $1, %ymm15, %xmm19
+; AVX512F-NEXT:    vmovdqa 512(%rdi), %ymm15
+; AVX512F-NEXT:    vpalignr {{.*#+}} ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vextracti128 $1, %ymm15, %xmm15
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm2, %zmm9
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm8, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm7, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm7, %zmm11
+; AVX512F-NEXT:    movb $-32, %al
+; AVX512F-NEXT:    kmovw %eax, %k2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm17 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm18 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm12 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm21 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm1 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm3 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm22 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm29 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm26 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm20 {%k2}
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm19, %zmm24, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm4 {%k2}
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm15, %zmm27, %zmm6
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm6 {%k2}
+; AVX512F-NEXT:    vmovdqa64 %ymm23, %ymm7
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm7 = ymm9[0,1,2,3],ymm7[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa 64(%rdi), %ymm9
+; AVX512F-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vextracti128 $1, %ymm9, %xmm9
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm9, %zmm25, %zmm9
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm7, %zmm15, %zmm19
+; AVX512F-NEXT:    vmovdqa 640(%rdi), %ymm15
+; AVX512F-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vmovdqa 1408(%rdi), %ymm7
+; AVX512F-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vextracti128 $1, %ymm7, %xmm7
+; AVX512F-NEXT:    vinserti32x4 $0, %xmm7, %zmm16, %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm7 {%k2}
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm13 = ymm13[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
+; AVX512F-NEXT:    vmovdqa 192(%rdi), %ymm15
+; AVX512F-NEXT:    vpalignr $8, (%rsp), %ymm15, %ymm15 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm10 = ymm10[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
+; AVX512F-NEXT:    vmovdqa 1536(%rdi), %ymm15
+; AVX512F-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm15 = mem[8,9,10,11,12,13,14,15],ymm15[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm15[16,17,18,19,20,21,22,23]
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm30, %zmm2
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vblendps $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm15 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm15 = ymm15[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vinsertf64x4 $0, %ymm15, %zmm16, %zmm15
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm14, %zmm16, %zmm14
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm16, %zmm8
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11, %ymm11 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm11 = ymm11[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm11, %zmm16, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
+; AVX512F-NEXT:    vmovdqa64 %zmm12, 128(%rsi)
+; AVX512F-NEXT:    vmovdqa64 %zmm18, 64(%rsi)
+; AVX512F-NEXT:    vmovdqa64 %zmm17, (%rsi)
+; AVX512F-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, (%rdx)
+; AVX512F-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 128(%rdx)
+; AVX512F-NEXT:    vmovdqa64 %zmm20, 192(%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm26, (%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm29, 64(%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm22, 128(%rcx)
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 192(%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm9, (%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm6, 64(%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm4, 128(%r8)
+; AVX512F-NEXT:    vmovdqa64 %zmm28, 192(%r9)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, (%r9)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 64(%r9)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 128(%r9)
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 192(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm10, (%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm13, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm19, 128(%rax)
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm11, 128(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm8, 192(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm14, (%rax)
+; AVX512F-NEXT:    vmovaps %zmm15, 64(%rax)
+; AVX512F-NEXT:    addq $2216, %rsp # imm = 0x8A8
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: load_i64_stride7_vf32:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $2152, %rsp # imm = 0x868
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1664(%rdi), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1600(%rdi), %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1216(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1152(%rdi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1088(%rdi), %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1024(%rdi), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 768(%rdi), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 704(%rdi), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 640(%rdi), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovaps 576(%rdi), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
-; AVX512DQBW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm31, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
-; AVX512DQBW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
-; AVX512DQBW-ONLY-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm19 = [12,5,12,5,12,5,12,5]
-; AVX512DQBW-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm19, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm7, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
-; AVX512DQBW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm28, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm28, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm29 = [9,0,7,0,9,0,7,0]
-; AVX512DQBW-ONLY-NEXT:    # zmm29 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm29, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm7, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm31, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1536(%rdi), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1472(%rdi), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm19, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm7, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm28, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm5, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm5, %zmm21, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 832(%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1280(%rdi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1728(%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm4, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    movb $24, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 512(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm21, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm24 {%k1} = zmm14[4,5,4,5],zmm11[4,5,4,5]
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [14,0,0,7,14,0,0,7]
-; AVX512DQBW-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm11, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [7,0,9,0,7,0,9,0]
-; AVX512DQBW-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm17, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [4,11,4,11]
-; AVX512DQBW-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm19, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm21, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm0[4,5,4,5],zmm22[4,5,4,5]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm17, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 960(%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 896(%rdi), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm21, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1408(%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1344(%rdi), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm6[4,5,4,5],zmm3[4,5,4,5]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm3, %zmm6, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm3, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm16 {%k1} = zmm1[4,5,4,5],zmm22[4,5,4,5]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 912(%rdi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} xmm1 = xmm1[0,1],mem[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 464(%rdi), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm13, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 1360(%rdi), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm0, %zmm11, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 1024(%rdi), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm1 = [4,11]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm20, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 576(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 1472(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm1 = [5,12]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa {{.*#+}} xmm6 = [6,13]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm0, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm8, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm6, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 1088(%rdi), %ymm5
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} ymm5 = ymm19[8,9,10,11,12,13,14,15],ymm5[0,1,2,3,4,5,6,7],ymm19[24,25,26,27,28,29,30,31],ymm5[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm12, %zmm28
-; AVX512DQBW-ONLY-NEXT:    movb $-32, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm24 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm12 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm19 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm23 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm22 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 960(%rdi), %ymm6
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} ymm6 = mem[8,9,10,11,12,13,14,15],ymm6[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm6[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vextracti128 $1, %ymm6, %xmm6
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm6, %zmm25, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm6 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 512(%rdi), %ymm7
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vextracti128 $1, %ymm7, %xmm7
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm7, %zmm26, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm7 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm9
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vextracti128 $1, %ymm9, %xmm9
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm9, %zmm30, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 1408(%rdi), %ymm20
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} ymm20 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vextracti32x4 $1, %ymm20, %xmm20
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $0, %xmm20, %zmm17, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm17 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 640(%rdi), %ymm20
-; AVX512DQBW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm20, %ymm5 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm5 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm13, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm13
-; AVX512DQBW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm13[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm11, %zmm13, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 1536(%rdi), %ymm13
-; AVX512DQBW-ONLY-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm13[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm29, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm13 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm13 = ymm15[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm14, %zmm15, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm15, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm10 = ymm10[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, 128(%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, 64(%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, (%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, (%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, 128(%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, 192(%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, (%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, 64(%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, 128(%rcx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, 192(%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, (%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 64(%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 128(%r8)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 192(%r9)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%r9)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 64(%r9)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 128(%r9)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, (%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm28, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, (%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $2152, %rsp # imm = 0x868
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: load_i64_stride7_vf32:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $2152, %rsp # imm = 0x868
+; AVX512BW-NEXT:    vmovdqa64 1664(%rdi), %zmm21
+; AVX512BW-NEXT:    vmovdqa64 1600(%rdi), %zmm31
+; AVX512BW-NEXT:    vmovdqa64 1216(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 1152(%rdi), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 1088(%rdi), %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 1024(%rdi), %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 768(%rdi), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 704(%rdi), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 640(%rdi), %zmm11
+; AVX512BW-NEXT:    vmovaps 576(%rdi), %zmm0
+; AVX512BW-NEXT:    vmovups %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 256(%rdi), %zmm5
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [10,3,10,3,10,3,10,3]
+; AVX512BW-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm7, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm7, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm31, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [11,4,11,4,11,4,11,4]
+; AVX512BW-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm7, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm13
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm7 = [13,6,13,6,13,6,13,6]
+; AVX512BW-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm13, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm13, %zmm18
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm19 = [12,5,12,5,12,5,12,5]
+; AVX512BW-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm19, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm19, %zmm22
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm7, %zmm16
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm7, %zmm22
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm24
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [0,7,14,0,0,7,14,0]
+; AVX512BW-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm28, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm28, %zmm25
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm29 = [9,0,7,0,9,0,7,0]
+; AVX512BW-NEXT:    # zmm29 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm29, %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm30
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm4 = [2,9,2,9,2,9,2,9]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm4, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm29, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm4, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm7, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm7, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm7, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm5
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm31, %zmm13
+; AVX512BW-NEXT:    vmovdqa64 1536(%rdi), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 1472(%rdi), %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm7, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm31
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm19, %zmm31
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm5, %zmm19
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm7, %zmm27
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm5, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm28, %zmm20
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm5, %zmm28
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm29, %zmm12
+; AVX512BW-NEXT:    vpermi2q %zmm5, %zmm21, %zmm29
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm4, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,9,0,5,6,9]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm5
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 832(%rdi), %zmm5
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 1280(%rdi), %zmm6
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 1728(%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,10,0,5,6,10]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,11,0,5,6,11]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm31
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm17
+; AVX512BW-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm26
+; AVX512BW-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [0,5,6,12,0,5,6,12]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm16
+; AVX512BW-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm24
+; AVX512BW-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,13,4,5,6,13]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm25
+; AVX512BW-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm28
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,6,14,4,5,6,14]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm30
+; AVX512BW-NEXT:    vmovdqu64 %zmm30, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm29
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm4 = [4,5,8,15,4,5,8,15]
+; AVX512BW-NEXT:    # zmm4 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm4, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm4, %zmm21
+; AVX512BW-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    movb $24, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqa64 512(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %zmm15
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} ymm21 = <0,7,14,u>
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm21, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm24 {%k1} = zmm14[4,5,4,5],zmm11[4,5,4,5]
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [14,0,0,7,14,0,0,7]
+; AVX512BW-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm13
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm11, %zmm13
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [7,0,9,0,7,0,9,0]
+; AVX512BW-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm17, %zmm26
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [4,11,4,11]
+; AVX512BW-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm19, %zmm14
+; AVX512BW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm14
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm21, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm18 {%k1} = zmm0[4,5,4,5],zmm22[4,5,4,5]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm11, %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm30
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm17, %zmm30
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm19, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 960(%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 896(%rdi), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm21, %zmm16
+; AVX512BW-NEXT:    vmovdqa64 1408(%rdi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 1344(%rdi), %zmm8
+; AVX512BW-NEXT:    vpermi2q %zmm5, %zmm8, %zmm21
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm21 {%k1} = zmm6[4,5,4,5],zmm3[4,5,4,5]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermi2q %zmm3, %zmm6, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm17, %zmm25
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm3, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm19, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm16 {%k1} = zmm1[4,5,4,5],zmm22[4,5,4,5]
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm19, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 912(%rdi), %xmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} xmm1 = xmm1[0,1],mem[2,3]
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm1, %zmm0, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 464(%rdi), %xmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm0, %zmm13, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 16(%rdi), %xmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm0, %zmm23, %zmm23
+; AVX512BW-NEXT:    vmovdqa 1360(%rdi), %xmm0
+; AVX512BW-NEXT:    vpblendd {{.*#+}} xmm0 = xmm0[0,1],mem[2,3]
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm0, %zmm11, %zmm22
+; AVX512BW-NEXT:    vinserti128 $1, 1088(%rdi), %ymm0, %ymm0
+; AVX512BW-NEXT:    vmovdqa 1024(%rdi), %ymm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3,4,5],ymm0[6,7]
+; AVX512BW-NEXT:    vmovdqa64 %ymm1, %ymm19
+; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm1 = [4,11]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm11
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm11
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm11[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm20, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti128 $1, 640(%rdi), %ymm0, %ymm0
+; AVX512BW-NEXT:    vmovdqa 576(%rdi), %ymm3
+; AVX512BW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm1, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti128 $1, 192(%rdi), %ymm0, %ymm0
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %ymm3
+; AVX512BW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm1, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti128 $1, 1536(%rdi), %ymm0, %ymm0
+; AVX512BW-NEXT:    vmovdqa 1472(%rdi), %ymm3
+; AVX512BW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm3[0,1,2,3,4,5],ymm0[6,7]
+; AVX512BW-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm1[0,1,2,3],ymm0[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa {{.*#+}} ymm0 = <9,0,7,u>
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm1 = [5,12]
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm13
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm1, %zmm13
+; AVX512BW-NEXT:    vmovdqa {{.*#+}} xmm6 = [6,13]
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm6, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm0, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm11
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm1, %zmm11
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm6, %zmm14
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm0, %zmm4
+; AVX512BW-NEXT:    vpermi2q %zmm8, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm9
+; AVX512BW-NEXT:    vpermi2q %zmm5, %zmm8, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm6, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm6, %zmm10
+; AVX512BW-NEXT:    vmovdqa 1088(%rdi), %ymm5
+; AVX512BW-NEXT:    vpalignr {{.*#+}} ymm5 = ymm19[8,9,10,11,12,13,14,15],ymm5[0,1,2,3,4,5,6,7],ymm19[24,25,26,27,28,29,30,31],ymm5[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm9[0,1,2,3],ymm5[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm12, %zmm28
+; AVX512BW-NEXT:    movb $-32, %al
+; AVX512BW-NEXT:    kmovd %eax, %k2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm18 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm24 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm16 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm4 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm3 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm2 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm12 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm19 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm23 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm22 {%k2}
+; AVX512BW-NEXT:    vmovdqa 960(%rdi), %ymm6
+; AVX512BW-NEXT:    vpalignr {{.*#+}} ymm6 = mem[8,9,10,11,12,13,14,15],ymm6[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm6[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vextracti128 $1, %ymm6, %xmm6
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm6, %zmm25, %zmm6
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm6 {%k2}
+; AVX512BW-NEXT:    vmovdqa 512(%rdi), %ymm7
+; AVX512BW-NEXT:    vpalignr {{.*#+}} ymm7 = mem[8,9,10,11,12,13,14,15],ymm7[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm7[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vextracti128 $1, %ymm7, %xmm7
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm7, %zmm26, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm7 {%k2}
+; AVX512BW-NEXT:    vmovdqa 64(%rdi), %ymm9
+; AVX512BW-NEXT:    vpalignr {{.*#+}} ymm9 = mem[8,9,10,11,12,13,14,15],ymm9[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm9[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vextracti128 $1, %ymm9, %xmm9
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm9, %zmm30, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 1408(%rdi), %ymm20
+; AVX512BW-NEXT:    vpalignr {{.*#+}} ymm20 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vextracti32x4 $1, %ymm20, %xmm20
+; AVX512BW-NEXT:    vinserti32x4 $0, %xmm20, %zmm17, %zmm17
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm17 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 640(%rdi), %ymm20
+; AVX512BW-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm20, %ymm5 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm5 = mem[8,9,10,11,12,13,14,15],ymm20[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm20[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm13[0,1,2,3],ymm5[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm13, %zmm5
+; AVX512BW-NEXT:    vmovdqa 192(%rdi), %ymm13
+; AVX512BW-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm11 = ymm11[0,1,2,3],ymm13[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm11, %zmm13, %zmm11
+; AVX512BW-NEXT:    vmovdqa 1536(%rdi), %ymm13
+; AVX512BW-NEXT:    vpalignr $8, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13, %ymm13 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm13 = mem[8,9,10,11,12,13,14,15],ymm13[0,1,2,3,4,5,6,7],mem[24,25,26,27,28,29,30,31],ymm13[16,17,18,19,20,21,22,23]
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm13[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm29, %zmm1
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15, %ymm13 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm13 = ymm15[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm13, %zmm15, %zmm13
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14, %ymm14 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm14 = ymm14[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm14, %zmm15, %zmm14
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8, %ymm8 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm8 = ymm8[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm15, %zmm8
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm10, %ymm10 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm10 = ymm10[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm15, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, 192(%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, 128(%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, 64(%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, (%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, 192(%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, (%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, 64(%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, 128(%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, 192(%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, (%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, 64(%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, 128(%rcx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, 192(%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, (%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 64(%r8)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 128(%r8)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 192(%r9)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%r9)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 64(%r9)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 128(%r9)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 192(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, (%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm28, 128(%rax)
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, 128(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, 192(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, (%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, 64(%rax)
+; AVX512BW-NEXT:    addq $2152, %rsp # imm = 0x868
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %wide.vec = load <224 x i64>, ptr %in.vec, align 64
   %strided.vec0 = shufflevector <224 x i64> %wide.vec, <224 x i64> poison, <32 x i32> <i32 0, i32 7, i32 14, i32 21, i32 28, i32 35, i32 42, i32 49, i32 56, i32 63, i32 70, i32 77, i32 84, i32 91, i32 98, i32 105, i32 112, i32 119, i32 126, i32 133, i32 140, i32 147, i32 154, i32 161, i32 168, i32 175, i32 182, i32 189, i32 196, i32 203, i32 210, i32 217>
   %strided.vec1 = shufflevector <224 x i64> %wide.vec, <224 x i64> poison, <32 x i32> <i32 1, i32 8, i32 15, i32 22, i32 29, i32 36, i32 43, i32 50, i32 57, i32 64, i32 71, i32 78, i32 85, i32 92, i32 99, i32 106, i32 113, i32 120, i32 127, i32 134, i32 141, i32 148, i32 155, i32 162, i32 169, i32 176, i32 183, i32 190, i32 197, i32 204, i32 211, i32 218>
@@ -10864,14 +9973,18 @@ define void @load_i64_stride7_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, pt
 ; AVX2-FAST-PERLANE: {{.*}}
 ; AVX2-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
+; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-FAST: {{.*}}
+; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}
 ; AVX512F-SLOW: {{.*}}
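
(Aside: in the hunk above, the separate AVX512BW-ONLY and AVX512DQBW-ONLY bodies collapse into one shared AVX512BW prefix because both run lines now produce byte-identical code for this function; the update script merges the check lines and records the now-unused prefixes in the footer list just above. A minimal C-intrinsics sketch of the equivalence being relied on, hypothetical and not part of this commit; build with e.g. gcc -O2 -mavx512dq and run on an AVX512DQ machine or under Intel SDE:

  #include <immintrin.h>
  #include <stdio.h>
  #include <string.h>

  int main(void) {
    __m128i lane = _mm_set_epi64x(0x0123456789abcdefLL, 0x0f1e2d3c4b5a6978LL);
    __m512i f  = _mm512_broadcast_i32x4(lane); /* AVX512F:  vbroadcasti32x4 */
    __m512i dq = _mm512_broadcast_i64x2(lane); /* AVX512DQ: vbroadcasti64x2 */
    /* Both replicate the same 128-bit lane four times, so the bytes match. */
    puts(memcmp(&f, &dq, sizeof f) == 0 ? "identical" : "differ");
    return 0;
  }

prints "identical".)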

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-3.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-3.ll
index e847933adf0abcf..bac0fbe9df6f607 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-3.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-3.ll
@@ -1467,79 +1467,42 @@ define void @load_i8_stride3_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr
 ; AVX512F-NEXT:    vzeroupper
 ; AVX512F-NEXT:    retq
 ;
-; AVX512BW-ONLY-LABEL: load_i8_stride3_vf64:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %xmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa 32(%rdi), %xmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa 96(%rdi), %xmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa 112(%rdi), %xmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm5
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 144(%rdi), %ymm3, %ymm3
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 48(%rdi), %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 160(%rdi), %ymm4, %ymm3
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 64(%rdi), %ymm1, %ymm1
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 176(%rdi), %ymm5, %ymm3
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 80(%rdi), %ymm2, %ymm2
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13]
-; AVX512BW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpshufb %zmm3, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vpshufb %zmm3, %zmm1, %zmm1
-; AVX512BW-ONLY-NEXT:    vpshufb %zmm3, %zmm2, %zmm2
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm3 = zmm2[11,12,13,14,15],zmm0[0,1,2,3,4,5,6,7,8,9,10],zmm2[27,28,29,30,31],zmm0[16,17,18,19,20,21,22,23,24,25,26],zmm2[43,44,45,46,47],zmm0[32,33,34,35,36,37,38,39,40,41,42],zmm2[59,60,61,62,63],zmm0[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm1 = zmm1[11,12,13,14,15],zmm2[0,1,2,3,4,5,6,7,8,9,10],zmm1[27,28,29,30,31],zmm2[16,17,18,19,20,21,22,23,24,25,26],zmm1[43,44,45,46,47],zmm2[32,33,34,35,36,37,38,39,40,41,42],zmm1[59,60,61,62,63],zmm2[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512BW-ONLY-NEXT:    movabsq $-576188069258921984, %rax # imm = 0xF800F800F800F800
-; AVX512BW-ONLY-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-NEXT:    vpblendmb %zmm1, %zmm0, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm1 = zmm3[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm3[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm3[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm3[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm3[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm3[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm3[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm3[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512BW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[10,11,12,13,14,15,0,1,2,3,4,5,6,7,8,9,26,27,28,29,30,31,16,17,18,19,20,21,22,23,24,25,42,43,44,45,46,47,32,33,34,35,36,37,38,39,40,41,58,59,60,61,62,63,48,49,50,51,52,53,54,55,56,57]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, (%rsi)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, (%rdx)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, (%rcx)
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
-;
-; AVX512DQBW-ONLY-LABEL: load_i8_stride3_vf64:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 16(%rdi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 32(%rdi), %xmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 96(%rdi), %xmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 112(%rdi), %xmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm5
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 144(%rdi), %ymm3, %ymm3
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 48(%rdi), %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 160(%rdi), %ymm4, %ymm3
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 64(%rdi), %ymm1, %ymm1
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 176(%rdi), %ymm5, %ymm3
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 80(%rdi), %ymm2, %ymm2
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} zmm3 = [0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm3 = mem[0,1,0,1,0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpshufb %zmm3, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpshufb %zmm3, %zmm1, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpshufb %zmm3, %zmm2, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm3 = zmm2[11,12,13,14,15],zmm0[0,1,2,3,4,5,6,7,8,9,10],zmm2[27,28,29,30,31],zmm0[16,17,18,19,20,21,22,23,24,25,26],zmm2[43,44,45,46,47],zmm0[32,33,34,35,36,37,38,39,40,41,42],zmm2[59,60,61,62,63],zmm0[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm1 = zmm1[11,12,13,14,15],zmm2[0,1,2,3,4,5,6,7,8,9,10],zmm1[27,28,29,30,31],zmm2[16,17,18,19,20,21,22,23,24,25,26],zmm1[43,44,45,46,47],zmm2[32,33,34,35,36,37,38,39,40,41,42],zmm1[59,60,61,62,63],zmm2[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512DQBW-ONLY-NEXT:    movabsq $-576188069258921984, %rax # imm = 0xF800F800F800F800
-; AVX512DQBW-ONLY-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-ONLY-NEXT:    vpblendmb %zmm1, %zmm0, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm1 = zmm3[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm3[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm3[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm3[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm3[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm3[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm3[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm3[48,49,50,51,52,53,54,55,56,57,58]
-; AVX512DQBW-ONLY-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[10,11,12,13,14,15,0,1,2,3,4,5,6,7,8,9,26,27,28,29,30,31,16,17,18,19,20,21,22,23,24,25,42,43,44,45,46,47,32,33,34,35,36,37,38,39,40,41,58,59,60,61,62,63,48,49,50,51,52,53,54,55,56,57]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, (%rsi)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, (%rdx)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, (%rcx)
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: load_i8_stride3_vf64:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    vmovdqa (%rdi), %xmm0
+; AVX512BW-NEXT:    vmovdqa 16(%rdi), %xmm1
+; AVX512BW-NEXT:    vmovdqa 32(%rdi), %xmm2
+; AVX512BW-NEXT:    vmovdqa 96(%rdi), %xmm3
+; AVX512BW-NEXT:    vmovdqa 112(%rdi), %xmm4
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %xmm5
+; AVX512BW-NEXT:    vinserti128 $1, 144(%rdi), %ymm3, %ymm3
+; AVX512BW-NEXT:    vinserti128 $1, 48(%rdi), %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
+; AVX512BW-NEXT:    vinserti128 $1, 160(%rdi), %ymm4, %ymm3
+; AVX512BW-NEXT:    vinserti128 $1, 64(%rdi), %ymm1, %ymm1
+; AVX512BW-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
+; AVX512BW-NEXT:    vinserti128 $1, 176(%rdi), %ymm5, %ymm3
+; AVX512BW-NEXT:    vinserti128 $1, 80(%rdi), %ymm2, %ymm2
+; AVX512BW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13,0,3,6,9,12,15,2,5,8,11,14,1,4,7,10,13]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpshufb %zmm3, %zmm0, %zmm0
+; AVX512BW-NEXT:    vpshufb %zmm3, %zmm1, %zmm1
+; AVX512BW-NEXT:    vpshufb %zmm3, %zmm2, %zmm2
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm3 = zmm2[11,12,13,14,15],zmm0[0,1,2,3,4,5,6,7,8,9,10],zmm2[27,28,29,30,31],zmm0[16,17,18,19,20,21,22,23,24,25,26],zmm2[43,44,45,46,47],zmm0[32,33,34,35,36,37,38,39,40,41,42],zmm2[59,60,61,62,63],zmm0[48,49,50,51,52,53,54,55,56,57,58]
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm1 = zmm1[11,12,13,14,15],zmm2[0,1,2,3,4,5,6,7,8,9,10],zmm1[27,28,29,30,31],zmm2[16,17,18,19,20,21,22,23,24,25,26],zmm1[43,44,45,46,47],zmm2[32,33,34,35,36,37,38,39,40,41,42],zmm1[59,60,61,62,63],zmm2[48,49,50,51,52,53,54,55,56,57,58]
+; AVX512BW-NEXT:    movabsq $-576188069258921984, %rax # imm = 0xF800F800F800F800
+; AVX512BW-NEXT:    kmovq %rax, %k1
+; AVX512BW-NEXT:    vpblendmb %zmm1, %zmm0, %zmm2 {%k1}
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm1 = zmm3[11,12,13,14,15],zmm1[0,1,2,3,4,5,6,7,8,9,10],zmm3[27,28,29,30,31],zmm1[16,17,18,19,20,21,22,23,24,25,26],zmm3[43,44,45,46,47],zmm1[32,33,34,35,36,37,38,39,40,41,42],zmm3[59,60,61,62,63],zmm1[48,49,50,51,52,53,54,55,56,57,58]
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[11,12,13,14,15],zmm3[0,1,2,3,4,5,6,7,8,9,10],zmm0[27,28,29,30,31],zmm3[16,17,18,19,20,21,22,23,24,25,26],zmm0[43,44,45,46,47],zmm3[32,33,34,35,36,37,38,39,40,41,42],zmm0[59,60,61,62,63],zmm3[48,49,50,51,52,53,54,55,56,57,58]
+; AVX512BW-NEXT:    vpalignr {{.*#+}} zmm0 = zmm0[10,11,12,13,14,15,0,1,2,3,4,5,6,7,8,9,26,27,28,29,30,31,16,17,18,19,20,21,22,23,24,25,42,43,44,45,46,47,32,33,34,35,36,37,38,39,40,41,58,59,60,61,62,63,48,49,50,51,52,53,54,55,56,57]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, (%rsi)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, (%rdx)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, (%rcx)
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %wide.vec = load <192 x i8>, ptr %in.vec, align 64
   %strided.vec0 = shufflevector <192 x i8> %wide.vec, <192 x i8> poison, <64 x i32> <i32 0, i32 3, i32 6, i32 9, i32 12, i32 15, i32 18, i32 21, i32 24, i32 27, i32 30, i32 33, i32 36, i32 39, i32 42, i32 45, i32 48, i32 51, i32 54, i32 57, i32 60, i32 63, i32 66, i32 69, i32 72, i32 75, i32 78, i32 81, i32 84, i32 87, i32 90, i32 93, i32 96, i32 99, i32 102, i32 105, i32 108, i32 111, i32 114, i32 117, i32 120, i32 123, i32 126, i32 129, i32 132, i32 135, i32 138, i32 141, i32 144, i32 147, i32 150, i32 153, i32 156, i32 159, i32 162, i32 165, i32 168, i32 171, i32 174, i32 177, i32 180, i32 183, i32 186, i32 189>
   %strided.vec1 = shufflevector <192 x i8> %wide.vec, <192 x i8> poison, <64 x i32> <i32 1, i32 4, i32 7, i32 10, i32 13, i32 16, i32 19, i32 22, i32 25, i32 28, i32 31, i32 34, i32 37, i32 40, i32 43, i32 46, i32 49, i32 52, i32 55, i32 58, i32 61, i32 64, i32 67, i32 70, i32 73, i32 76, i32 79, i32 82, i32 85, i32 88, i32 91, i32 94, i32 97, i32 100, i32 103, i32 106, i32 109, i32 112, i32 115, i32 118, i32 121, i32 124, i32 127, i32 130, i32 133, i32 136, i32 139, i32 142, i32 145, i32 148, i32 151, i32 154, i32 157, i32 160, i32 163, i32 166, i32 169, i32 172, i32 175, i32 178, i32 181, i32 184, i32 187, i32 190>
@@ -1558,6 +1521,7 @@ define void @load_i8_stride3_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr
 ; AVX512-FAST: {{.*}}
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
@@ -1565,6 +1529,7 @@ define void @load_i8_stride3_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr
 ; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-FAST: {{.*}}
 ; AVX512F-ONLY: {{.*}}

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-7.ll
index 370a7d221369235..0e3f8121db1de0e 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-load-i8-stride-7.ll
@@ -12065,8 +12065,8 @@ define void @load_i8_stride7_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr
 ; AVX512DQBW-FAST-NEXT:    vpshufb {{.*#+}} xmm20 = xmm20[u,u,u],zero,zero,xmm20[4,11],zero,zero,xmm20[0,7,14,u,u,u,u]
 ; AVX512DQBW-FAST-NEXT:    vporq %xmm23, %xmm20, %xmm20
 ; AVX512DQBW-FAST-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm15
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm20 = [1,3,4,6,1,3,4,6]
-; AVX512DQBW-FAST-NEXT:    # ymm20 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [1,3,4,6,1,3,4,6]
+; AVX512DQBW-FAST-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vpermd %ymm19, %ymm20, %ymm20
 ; AVX512DQBW-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,17,20,27,30]
 ; AVX512DQBW-FAST-NEXT:    vpblendd {{.*#+}} ymm14 = ymm15[0,1,2,3,4,5,6],ymm14[7]
@@ -12089,8 +12089,8 @@ define void @load_i8_stride7_vf64(ptr %in.vec, ptr %out.vec0, ptr %out.vec1, ptr
 ; AVX512DQBW-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,0,7,14],zero,zero,xmm14[3,10],zero,zero,zero,xmm14[u,u,u,u]
 ; AVX512DQBW-FAST-NEXT:    vporq %xmm20, %xmm14, %xmm14
 ; AVX512DQBW-FAST-NEXT:    vinserti128 $1, %xmm14, %ymm0, %ymm14
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm20 = [1,3,5,6,1,3,5,6]
-; AVX512DQBW-FAST-NEXT:    # ymm20 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [1,3,5,6,1,3,5,6]
+; AVX512DQBW-FAST-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vpermd %ymm19, %ymm20, %ymm19
 ; AVX512DQBW-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm19[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,21,24,31]
 ; AVX512DQBW-FAST-NEXT:    vpblendd {{.*#+}} ymm0 = ymm14[0,1,2,3,4,5,6],ymm0[7]
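
(Aside: these two hunks show the opcode change directly: the DQI-only vbroadcasti64x2 becomes the AVX512F vbroadcasti32x4, and only the decode comment changes granularity, from four qwords mem[0,1,0,1] to eight dwords mem[0,1,2,3,0,1,2,3]; the bytes loaded are the same 128-bit lane repeated. A hypothetical standalone check of that equivalence, not part of this commit; build with e.g. gcc -O2 -mavx512dq -mavx512vl:

  #include <immintrin.h>
  #include <stdio.h>
  #include <string.h>

  int main(void) {
    __m128i lane = _mm_set_epi32(4, 3, 2, 1);
    __m256i f  = _mm256_broadcast_i32x4(lane); /* AVX512F+VL:  vbroadcasti32x4 */
    __m256i dq = _mm256_broadcast_i64x2(lane); /* AVX512DQ+VL: vbroadcasti64x2 */
    puts(memcmp(&f, &dq, sizeof f) == 0 ? "identical" : "differ");
    return 0;
  }
)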

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-7.ll
index 3a0e1d92b48c05a..5934b80893ce390 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i16-stride-7.ll
@@ -2315,537 +2315,271 @@ define void @store_i16_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-FAST-PERLANE-NEXT:    vzeroupper
 ; AVX2-FAST-PERLANE-NEXT:    retq
 ;
-; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf16:
-; AVX512F-ONLY-SLOW:       # %bb.0:
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rax), %ymm13
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u],zero,zero,ymm7[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[16,17,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,14,15],zero,zero,ymm6[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm6[u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm16
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm15
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm21
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm9[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm9[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm8[12,13,14,15],zero,zero,ymm8[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm8[u,u,u,u,u,u,u,u,16,17,18,19]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm17
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm5 = ymm3[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[16,17,u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm2[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm2[u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %ymm5, %ymm10, %ymm19
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpandn %ymm5, %ymm10, %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm13[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[16,17,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm7[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm22
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm10[2],ymm12[3,4],ymm10[5],ymm12[6,7,8,9],ymm10[10],ymm12[11,12],ymm10[13],ymm12[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm8[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm11 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm11[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm11 = ymm11[0,1],ymm10[2],ymm11[3,4],ymm10[5],ymm11[6,7,8,9],ymm10[10],ymm11[11,12],ymm10[13],ymm11[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm20
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm4, %xmm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm18 = [0,16,0,1,17,17,2,0,0,16,0,1,17,17,2,0]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm12, %zmm11, %zmm18
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3],xmm14[4],xmm15[5,6],xmm14[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm15
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm0, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm2[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1,2],ymm0[3],ymm14[4,5],ymm0[6],ymm14[7,8,9,10],ymm0[11],ymm14[12,13],ymm0[14],ymm14[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm11 = xmm0[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm11, %zmm0, %zmm15
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm3, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm2[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5,6,7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm21, %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm7 = xmm7[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm10[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm22[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm12[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm14[2,2,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermd %zmm13, %zmm3, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm13 = ymm13[2,3,3,3,6,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm7, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm4, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm5
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm20[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm12, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm1[0,0,1,1,4,4,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd (%rax), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm13[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm0, 192(%rcx)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm1, (%rcx)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm3, 128(%rcx)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm5, 64(%rcx)
-; AVX512F-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512F-ONLY-SLOW-NEXT:    retq
-;
-; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf16:
-; AVX512F-ONLY-FAST:       # %bb.0:
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm7[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm5[12,13,14,15],zero,zero,ymm5[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm5[u,u,u,u,u,u,u,u,16,17,18,19]
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm16
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %xmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %xmm12
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u],zero,zero,ymm6[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[16,17,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,14,15],zero,zero,ymm4[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm4[u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm17
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[16,17,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm1[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm1[u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm3 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <2,u,3,2,u,10,10,11>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm3[2],ymm10[3,4],ymm3[5],ymm10[6,7,8,9],ymm3[10],ymm10[11,12],ymm3[13],ymm10[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm19
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm2, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm13[0,1,2],ymm3[3],ymm13[4,5],ymm3[6],ymm13[7,8,9,10],ymm3[11],ymm13[12,13],ymm3[14],ymm13[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm13
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm12, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2],xmm3[3,4],xmm0[5],xmm3[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm15 = [0,8,1,9,0,8,1,9]
-; AVX512F-ONLY-FAST-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm15
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm8 = xmm8[0],xmm14[1],xmm8[2,3],xmm14[4],xmm8[5,6],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm12, %xmm11, %xmm11
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm11, %zmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm12
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm10, %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm8[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,0,0,1,8,9,9,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm14, %zmm8, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm8 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm8[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[16,17,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm10, %zmm10
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm5[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5],ymm7[6],ymm5[7,8,9,10],ymm7[11],ymm5[12,13],ymm7[14],ymm5[15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512F-ONLY-FAST-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm8, %zmm6, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <6,u,u,u,7,u,u,7>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm8, %ymm7, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm12[0,0,1,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm16, %zmm3, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm8, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm10
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm19[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm11[0,0,1,1,4,4,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd (%rax), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm1, 192(%rcx)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm2, (%rcx)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm6, 128(%rcx)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm10, 64(%rcx)
-; AVX512F-ONLY-FAST-NEXT:    vzeroupper
-; AVX512F-ONLY-FAST-NEXT:    retq
-;
-; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf16:
-; AVX512DQ-SLOW:       # %bb.0:
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %ymm8
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %ymm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %ymm6
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %ymm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rax), %ymm13
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u],zero,zero,ymm7[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[16,17,u,u,u,u,u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,14,15],zero,zero,ymm6[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm6[u,u,u,u,u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm16
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %xmm15
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm21
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm9[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm9[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm8[12,13,14,15],zero,zero,ymm8[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm8[u,u,u,u,u,u,u,u,16,17,18,19]
-; AVX512DQ-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm17
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm5 = ymm3[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[16,17,u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm2[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm2[u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vporq %ymm5, %ymm10, %ymm19
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm5
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpandn %ymm5, %ymm10, %ymm5
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm13[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[16,17,u,u]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm7[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm22
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm10[2],ymm12[3,4],ymm10[5],ymm12[6,7,8,9],ymm10[10],ymm12[11,12],ymm10[13],ymm12[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm8[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm11 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm11[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm11 = ymm11[0,1],ymm10[2],ymm11[3,4],ymm10[5],ymm11[6,7,8,9],ymm10[10],ymm11[11,12],ymm10[13],ymm11[14,15]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm20
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm4, %xmm11
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm18 = [0,16,0,1,17,17,2,0,0,16,0,1,17,17,2,0]
-; AVX512DQ-SLOW-NEXT:    # zmm18 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm12, %zmm11, %zmm18
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %xmm11
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3],xmm14[4],xmm15[5,6],xmm14[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %xmm15
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm0, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm2[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1,2],ymm0[3],ymm14[4,5],ymm0[6],ymm14[7,8,9,10],ymm0[11],ymm14[12,13],ymm0[14],ymm14[15]
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm11 = xmm0[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm11, %zmm0, %zmm15
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm3, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm2[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5,6,7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm21, %xmm7
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm7 = xmm7[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm10[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm22[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm12[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm14[2,2,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm3 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512DQ-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermd %zmm13, %zmm3, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm13 = ymm13[2,3,3,3,6,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm7, %zmm7
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm4, %zmm4
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm5
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm4
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm20[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm7
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm12, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm1[0,0,1,1,4,4,5,5]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd (%rax), %ymm1
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm4
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm6
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm13[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm0, 192(%rcx)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm1, (%rcx)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm3, 128(%rcx)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm5, 64(%rcx)
-; AVX512DQ-SLOW-NEXT:    vzeroupper
-; AVX512DQ-SLOW-NEXT:    retq
+; AVX512F-SLOW-LABEL: store_i16_stride7_vf16:
+; AVX512F-SLOW:       # %bb.0:
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %ymm8
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %ymm9
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %ymm6
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %ymm7
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa (%rax), %ymm13
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm7[u,u,u,u,u,u],zero,zero,ymm7[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm7[16,17,u,u,u,u,u,u,u,u]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm6[u,u,u,u,u,u,14,15],zero,zero,ymm6[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm6[u,u,u,u,u,u,u,u]
+; AVX512F-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm16
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %xmm14
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %xmm15
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm15[4],xmm14[4],xmm15[5],xmm14[5],xmm15[6],xmm14[6],xmm15[7],xmm14[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm21
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = zero,zero,zero,zero,ymm9[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm9[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm4 = ymm8[12,13,14,15],zero,zero,ymm8[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm8[u,u,u,u,u,u,u,u,16,17,18,19]
+; AVX512F-SLOW-NEXT:    vporq %ymm1, %ymm4, %ymm17
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %xmm4
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm5 = ymm3[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[16,17,u,u,u,u]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm2[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm2[u,u,u,u]
+; AVX512F-SLOW-NEXT:    vporq %ymm5, %ymm10, %ymm19
+; AVX512F-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm5
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpandn %ymm5, %ymm10, %ymm5
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm13[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm13[16,17,u,u]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm7[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm22
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm6[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm12 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm12[0,1],ymm10[2],ymm12[3,4],ymm10[5],ymm12[6,7,8,9],ymm10[10],ymm12[11,12],ymm10[13],ymm12[14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm8[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm11 = ymm9[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm11[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm11 = ymm11[0,1],ymm10[2],ymm11[3,4],ymm10[5],ymm11[6,7,8,9],ymm10[10],ymm11[11,12],ymm10[13],ymm11[14,15]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm20
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm4, %xmm11
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm12 = xmm1[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm11 = xmm12[0,1],xmm11[2],xmm12[3,4],xmm11[5],xmm12[6,7]
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm12 = xmm15[0],xmm14[0],xmm15[1],xmm14[1],xmm15[2],xmm14[2],xmm15[3],xmm14[3]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm12[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm18 = [0,16,0,1,17,17,2,0,0,16,0,1,17,17,2,0]
+; AVX512F-SLOW-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm12, %zmm11, %zmm18
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm8[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm9[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm11[0],ymm12[1],ymm11[2,3],ymm12[4],ymm11[5,6,7,8],ymm12[9],ymm11[10,11],ymm12[12],ymm11[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %xmm11
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm15[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm14 = xmm15[0],xmm14[1],xmm15[2,3],xmm14[4],xmm15[5,6],xmm14[7]
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %xmm15
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm15[4],xmm11[4],xmm15[5],xmm11[5],xmm15[6],xmm11[6],xmm15[7],xmm11[7]
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm0, %zmm1
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm2[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1,2],ymm0[3],ymm14[4,5],ymm0[6],ymm14[7,8,9,10],ymm0[11],ymm14[12,13],ymm0[14],ymm14[15]
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm11[0],xmm15[1],xmm11[1],xmm15[2],xmm11[2],xmm15[3],xmm11[3]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm11 = xmm0[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm11, %zmm0, %zmm15
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm3, %ymm0
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm2[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm9[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm8 = ymm8[0,1,2],ymm9[3],ymm8[4,5],ymm9[6],ymm8[7,8,9,10],ymm9[11],ymm8[12,13],ymm9[14],ymm8[15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm7 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0],ymm7[1],ymm6[2,3],ymm7[4],ymm6[5,6,7,8],ymm7[9],ymm6[10,11],ymm7[12],ymm6[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm21, %xmm7
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm7 = xmm7[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm10[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm9 = xmm9[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm22[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm12[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm14[2,2,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm3[0,1],ymm2[2],ymm3[3,4],ymm2[5],ymm3[6,7,8,9],ymm2[10],ymm3[11,12],ymm2[13],ymm3[14,15]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
+; AVX512F-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpermd %zmm13, %zmm3, %zmm3
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm13 = ymm13[2,3,3,3,6,7,7,7]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm7, %zmm7
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm4, %zmm4
+; AVX512F-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm5
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm5
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm4
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm20[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm7
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm12, %zmm0
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm3
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm1[0,0,1,1,4,4,5,5]
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm0
+; AVX512F-SLOW-NEXT:    vpbroadcastd (%rax), %ymm1
+; AVX512F-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm4
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm8, %ymm6
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm13[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm0
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm0, 192(%rcx)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm1, (%rcx)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm3, 128(%rcx)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm5, 64(%rcx)
+; AVX512F-SLOW-NEXT:    vzeroupper
+; AVX512F-SLOW-NEXT:    retq
 ;
-; AVX512DQ-FAST-LABEL: store_i16_stride7_vf16:
-; AVX512DQ-FAST:       # %bb.0:
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %ymm7
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %ymm6
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm7[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm5[12,13,14,15],zero,zero,ymm5[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm5[u,u,u,u,u,u,u,u,16,17,18,19]
-; AVX512DQ-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm16
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %xmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %xmm12
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u],zero,zero,ymm6[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[16,17,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,14,15],zero,zero,ymm4[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm4[u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm17
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[16,17,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm1[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm1[u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm18
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm3 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <2,u,3,2,u,10,10,11>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm9
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm3[2],ymm10[3,4],ymm3[5],ymm10[6,7,8,9],ymm3[10],ymm10[11,12],ymm3[13],ymm10[14,15]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm19
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm2, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm13[0,1,2],ymm3[3],ymm13[4,5],ymm3[6],ymm13[7,8,9,10],ymm3[11],ymm13[12,13],ymm3[14],ymm13[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %xmm8
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm13
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm12, %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2],xmm3[3,4],xmm0[5],xmm3[6,7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm15 = [0,8,1,9,0,8,1,9]
-; AVX512DQ-FAST-NEXT:    # zmm15 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm15
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm8 = xmm8[0],xmm14[1],xmm8[2,3],xmm14[4],xmm8[5,6],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %xmm14
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm12, %xmm11, %xmm11
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm11, %zmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %xmm8
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm12
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm10
-; AVX512DQ-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm10, %ymm10
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm8[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,0,0,1,8,9,9,11]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm14, %zmm8, %zmm20
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm8 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm8[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[16,17,u,u]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm10, %zmm10
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm5[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5],ymm7[6],ymm5[7,8,9,10],ymm7[11],ymm5[12,13],ymm7[14],ymm5[15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13,14,15]
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512DQ-FAST-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm8, %zmm6, %zmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <6,u,u,u,7,u,u,7>
-; AVX512DQ-FAST-NEXT:    vpermd %ymm8, %ymm7, %ymm7
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm12[0,0,1,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm16, %zmm3, %zmm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm8, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm10
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm19[2,2,2,3,6,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm11[0,0,1,1,4,4,5,5]
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm0
-; AVX512DQ-FAST-NEXT:    vpbroadcastd (%rax), %ymm2
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm3
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm4
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm1
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm1, 192(%rcx)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm2, (%rcx)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm6, 128(%rcx)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm10, 64(%rcx)
-; AVX512DQ-FAST-NEXT:    vzeroupper
-; AVX512DQ-FAST-NEXT:    retq
+; AVX512F-FAST-LABEL: store_i16_stride7_vf16:
+; AVX512F-FAST:       # %bb.0:
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %ymm5
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %ymm7
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %ymm6
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %ymm2
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,zero,ymm7[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm7[16,17,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm5[12,13,14,15],zero,zero,ymm5[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm5[u,u,u,u,u,u,u,u,16,17,18,19]
+; AVX512F-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm16
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %xmm11
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %xmm12
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u],zero,zero,ymm6[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm6[16,17,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm4[u,u,u,u,u,u,14,15],zero,zero,ymm4[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm4[u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm17
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %xmm14
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm2[u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[14,15,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[16,17,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm1[u,u,u,u,u,u,u,u,u,u,14,15],zero,zero,ymm1[u,u,u,u,u,u,u,u,u,u,16,17],zero,zero,ymm1[u,u,u,u]
+; AVX512F-FAST-NEXT:    vporq %ymm0, %ymm3, %ymm18
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,u,u,u,u,26,27,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm3 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm4[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm9[0,1],ymm3[2],ymm9[3,4],ymm3[5],ymm9[6,7,8,9],ymm3[10],ymm9[11,12],ymm3[13],ymm9[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <2,u,3,2,u,10,10,11>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm9
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm5[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,u,u,20,21,24,25,u,u,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm10[0,1],ymm3[2],ymm10[3,4],ymm3[5],ymm10[6,7,8,9],ymm3[10],ymm10[11,12],ymm3[13],ymm10[14,15]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm19
+; AVX512F-FAST-NEXT:    vprold $16, %ymm2, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm1[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,u,u,18,19,20,21,u,u,20,21]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm13[0,1,2],ymm3[3],ymm13[4,5],ymm3[6],ymm13[7,8,9,10],ymm3[11],ymm13[12,13],ymm3[14],ymm13[15]
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %xmm8
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,2,3,3,10,9,11,10]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm13
+; AVX512F-FAST-NEXT:    vprold $16, %xmm12, %xmm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm3 = xmm11[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm3[0,1],xmm0[2],xmm3[3,4],xmm0[5],xmm3[6,7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm15 = [0,8,1,9,0,8,1,9]
+; AVX512F-FAST-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm0, %zmm3, %zmm15
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[u,u,4,5,u,u,u,u,6,7,u,u,u,u,8,9]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm8 = xmm8[0],xmm14[1],xmm8[2,3],xmm14[4],xmm8[5,6],xmm14[7]
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %xmm14
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm11 = xmm11[0],xmm12[0],xmm11[1],xmm12[1],xmm11[2],xmm12[2],xmm11[3],xmm12[3]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm12 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-FAST-NEXT:    vpshufb %xmm12, %xmm11, %xmm11
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm11, %zmm11
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %xmm8
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm10 = xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
+; AVX512F-FAST-NEXT:    vpshufb %xmm12, %xmm10, %xmm12
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm10
+; AVX512F-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm10, %ymm10
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm14 = xmm8[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm8 = xmm8[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,0,0,1,8,9,9,11]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm14, %zmm8, %zmm20
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm8 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm8[12,13,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[14,15,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm8[16,17,u,u]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm10, %zmm10
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,14,15,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm5[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0,1,2],ymm7[3],ymm5[4,5],ymm7[6],ymm5[7,8,9,10],ymm7[11],ymm5[12,13],ymm7[14],ymm5[15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm6 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,u,u,u,u,30,31,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0],ymm6[1],ymm4[2,3],ymm6[4],ymm4[5,6,7,8],ymm6[9],ymm4[10,11],ymm6[12],ymm4[13,14,15]
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm6 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
+; AVX512F-FAST-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermd %zmm8, %zmm6, %zmm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <6,u,u,u,7,u,u,7>
+; AVX512F-FAST-NEXT:    vpermd %ymm8, %ymm7, %ymm7
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,3]
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,0,2,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm12[0,0,1,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,1,3,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,2,2,3]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,u,u,28,29,26,27,u,u,30,31,30,31]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,2,2,3]
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rcx
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm16, %zmm3, %zmm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm8, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm10
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm10
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm19[2,2,2,3,6,6,6,7]
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm13, %zmm6
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm6
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} zmm0 = zmm11[0,0,1,1,4,4,5,5]
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm0
+; AVX512F-FAST-NEXT:    vpbroadcastd (%rax), %ymm2
+; AVX512F-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm3
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm5, %ymm4
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm7, %ymm1
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm4, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa %ymm1, 192(%rcx)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm2, (%rcx)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm6, 128(%rcx)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm10, 64(%rcx)
+; AVX512F-FAST-NEXT:    vzeroupper
+; AVX512F-FAST-NEXT:    retq
 ;
 ; AVX512BW-LABEL: store_i16_stride7_vf16:
 ; AVX512BW:       # %bb.0:
@@ -5248,1251 +4982,628 @@ define void @store_i16_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-FAST-PERLANE-NEXT:    vzeroupper
 ; AVX2-FAST-PERLANE-NEXT:    retq
 ;
-; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf32:
-; AVX512F-ONLY-SLOW:       # %bb.0:
-; AVX512F-ONLY-SLOW-NEXT:    subq $632, %rsp # imm = 0x278
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm9, %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm16
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm29
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm1, %ymm15
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm1, %ymm10, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm11
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm11, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r9), %xmm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r8), %xmm6
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm10[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm2[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm2, %zmm3, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm6, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm3, %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm9, %ymm12, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r9), %ymm13
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r8), %ymm14
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm1, %ymm13, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm14, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm2 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm2, %xmm8, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm2, %xmm20
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <u,16,u,u,17,17,u,u,0,u,u,1,2,u,u,3>
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm2, %zmm1, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm10, %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm10[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm9[0,1,2],ymm5[3],ymm9[4,5],ymm5[6],ymm9[7,8,9,10],ymm5[11],ymm9[12,13],ymm5[14],ymm9[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2q %zmm4, %zmm5, %zmm27
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm31
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rax), %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm28
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm30
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm12[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm19
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm13, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm13[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm16, %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm29[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm15, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm15[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm17[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm9, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm0[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm1[2],xmm15[3,4],xmm1[5],xmm15[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm20, %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm9, %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm15
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm15[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm0 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm26
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm15[4],xmm9[4],xmm15[5],xmm9[5],xmm15[6],xmm9[6],xmm15[7],xmm9[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm9, %xmm25
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm6[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1],ymm3[2],ymm6[3,4],ymm3[5],ymm6[6,7,8,9],ymm3[10],ymm6[11,12],ymm3[13],ymm6[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm12[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm22
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm14[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm13[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3],ymm6[4,5],ymm3[6],ymm6[7,8,9,10],ymm3[11],ymm6[12,13],ymm3[14],ymm6[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm21
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rax), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vpermd %zmm6, %zmm3, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm28, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm7, %ymm6, %ymm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm11, %zmm6
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm13
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm14
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm14, %xmm14
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm13[0,1],xmm14[2],xmm13[3,4],xmm14[5],xmm13[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm16
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm4, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm29, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm29[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm9[0,1],ymm14[2],ymm9[3,4],ymm14[5],ymm9[6,7,8,9],ymm14[10],ymm9[11,12],ymm14[13],ymm9[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm31, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm15[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm30[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm23[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm19[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm18[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm5[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0],ymm12[1],ymm5[2,3],ymm12[4],ymm5[5,6,7,8],ymm12[9],ymm5[10,11],ymm12[12],ymm5[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm28, %zmm12
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm10, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm12, %zmm10, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm31, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm0 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm9 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm11[0,1,2,3],zmm0[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm11 = mem[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm12 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm15 = mem[2,3,3,3,6,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm17 = mem[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm8 = mem[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm26[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm25, %xmm13
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm13 = xmm13[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm24[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm22[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm21[2,2,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm20[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm16[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm16, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm15[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm17, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm18, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm10, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm1, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm8, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm8 = zmm10[0,1,2,3],zmm8[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm6
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm6
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm30, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd (%rax), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm14, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermd (%rax), %zmm2, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm6, 320(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm1, 256(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm7, 192(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm3, 384(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    addq $632, %rsp # imm = 0x278
-; AVX512F-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512F-ONLY-SLOW-NEXT:    retq
-;
-; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf32:
-; AVX512F-ONLY-FAST:       # %bb.0:
-; AVX512F-ONLY-FAST-NEXT:    subq $248, %rsp
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm1, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm1, %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm4, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm4, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm4, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm4, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm6, %ymm16
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm13, %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm4, %ymm14, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm6, %ymm7, %ymm25
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm11, %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm8, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm6, %ymm5, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm3, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm3, %ymm29
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm12, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %ymm15
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm4, %ymm6, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm0, %ymm1, %ymm21
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm11, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm8[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm11, %ymm30
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6,7,8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm24 = <u,2,2,3,10,u,11,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm1, %zmm2, %zmm24
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm10[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm18[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm10[0],xmm5[1],xmm10[1],xmm5[2],xmm10[2],xmm5[3],xmm10[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,2,2,3,8,u,9,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm23
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm16[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm11, %xmm4, %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [2,1,3,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm0[0,1],ymm4[2],ymm0[3,4],ymm4[5],ymm0[6,7,8,9],ymm4[10],ymm0[11,12],ymm4[13],ymm0[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [2,2,2,3,8,8,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm0, %xmm3, %xmm7
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm7, %zmm4, %zmm22
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2],xmm1[3,4],xmm2[5],xmm1[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,0,u,1,8,8,9,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm26
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm2[0],xmm9[1],xmm2[1],xmm9[2],xmm2[2],xmm9[3],xmm2[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,0,1,8,9,9,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm27
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm4[0,1],ymm0[2],ymm4[3,4],ymm0[5],ymm4[6,7,8,9],ymm0[10],ymm4[11,12],ymm0[13],ymm4[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm13, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm14[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1,2],ymm13[3],ymm4[4,5],ymm13[6],ymm4[7,8,9,10],ymm13[11],ymm4[12,13],ymm13[14],ymm4[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm0, %zmm4, %zmm28
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm2[4],xmm9[5],xmm2[5],xmm9[6],xmm2[6],xmm9[7],xmm2[7]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm25, %zmm0, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm11, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <0,0,1,1,12,13,u,15>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm4, %zmm2, %zmm25
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rax), %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm9, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm31
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm30, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm29, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm2 = ymm7[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm15, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8,9,10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm29 = <0,1,u,3,10,10,11,11>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm2, %zmm21, %zmm29
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rax), %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512F-ONLY-FAST-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm8, %ymm21, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm7, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm7[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm12[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512F-ONLY-FAST-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm4, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm12 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7,8,9],ymm12[10],ymm13[11,12],ymm12[13],ymm13[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [2,2,2,3,8,10,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm30, %zmm12
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm13[2],ymm3[3,4],ymm13[5],ymm3[6,7,8,9],ymm13[10],ymm3[11,12],ymm13[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm15, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm13[2],ymm6[3,4],ymm13[5],ymm6[6,7,8,9],ymm13[10],ymm6[11,12],ymm13[13],ymm6[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,1,3,2,10,10,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm15 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm15, %xmm10, %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0],xmm10[1],xmm5[2,3],xmm10[4],xmm5[5,6],xmm10[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm3 = xmm3[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,0,1,1,8,8,10,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm3 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm3 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm10 = xmm1[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm11, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm11, %xmm3, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,1,1,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm4, %zmm10
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm14, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm11 = xmm0[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2],xmm11[3,4],xmm3[5],xmm11[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm1, %xmm0, %xmm11
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm15, %xmm0, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm14 = xmm3[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm2 = xmm14[0],xmm2[1],xmm14[2,3],xmm2[4],xmm14[5,6],xmm2[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm24, %zmm2, %zmm12
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm12, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm4 = ymm12[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm1[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm7[0,1],ymm4[2],ymm7[3,4],ymm4[5],ymm7[6,7,8,9],ymm4[10],ymm7[11,12],ymm4[13],ymm7[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 (%rax), %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm7 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
-; AVX512F-ONLY-FAST-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2d %zmm3, %zmm8, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm6, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm26, %zmm8, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm14[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm19[0,0,1,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm18[2,2,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm17[2,1,3,2]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm15[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm14 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm14[0],ymm0[1],ymm14[2,3],ymm0[4],ymm14[5,6,7,8],ymm0[9],ymm14[10,11],ymm0[12],ymm14[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm15 = ymm1[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7,8,9],ymm15[10],ymm14[11,12],ymm15[13],ymm14[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm30, %zmm14
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm14
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <6,u,u,u,7,u,u,7>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm9, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm9, %zmm3, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm3, %zmm21, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm14, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm4 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm9 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm8, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm23, %zmm2, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm31
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm31
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm12, %zmm2, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm13, %zmm4, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm29, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm22, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm3, 128(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm6, 256(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm0, 192(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm7, 384(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm31, 64(%rax)
-; AVX512F-ONLY-FAST-NEXT:    addq $248, %rsp
-; AVX512F-ONLY-FAST-NEXT:    vzeroupper
-; AVX512F-ONLY-FAST-NEXT:    retq
-;
-; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf32:
-; AVX512DQ-SLOW:       # %bb.0:
-; AVX512DQ-SLOW-NEXT:    subq $632, %rsp # imm = 0x278
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm9, %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm16
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm29
-; AVX512DQ-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm1, %ymm15
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm14, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512DQ-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %ymm10
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm1, %ymm10, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %ymm11
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm11, %ymm3
-; AVX512DQ-SLOW-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r9), %xmm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r8), %xmm6
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm10[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm2[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %xmm8
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm2, %zmm3, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm6
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm6, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm14, %ymm3, %ymm4
-; AVX512DQ-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm12
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm9, %ymm12, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm4
-; AVX512DQ-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r9), %ymm13
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r8), %ymm14
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm1, %ymm13, %ymm1
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm14, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512DQ-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm2 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm2, %xmm8, %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm2, %xmm20
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <u,16,u,u,17,17,u,u,0,u,u,1,2,u,u,3>
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm2, %zmm1, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm10, %ymm4
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm10[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm9[0,1,2],ymm5[3],ymm9[4,5],ymm5[6],ymm9[7,8,9,10],ymm5[11],ymm9[12,13],ymm5[14],ymm9[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-SLOW-NEXT:    vpermi2q %zmm4, %zmm5, %zmm27
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm31
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rax), %ymm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm0
-; AVX512DQ-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm28
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm30
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm12[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm19
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm13, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm13[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm16, %ymm4
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm29[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm15, %ymm8
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm15[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm17[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm9, %xmm1
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm0[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm1[2],xmm15[3,4],xmm1[5],xmm15[6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm20, %xmm0
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm9, %xmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm15
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm15[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm0 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm26
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm15[4],xmm9[4],xmm15[5],xmm9[5],xmm15[6],xmm9[6],xmm15[7],xmm9[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm9, %xmm25
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm6[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1],ymm3[2],ymm6[3,4],ymm3[5],ymm6[6,7,8,9],ymm3[10],ymm6[11,12],ymm3[13],ymm6[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm12[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm22
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm14[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm13[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3],ymm6[4,5],ymm3[6],ymm6[7,8,9,10],ymm3[11],ymm6[12,13],ymm3[14],ymm6[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm21
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm3 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
-; AVX512DQ-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rax), %ymm6
-; AVX512DQ-SLOW-NEXT:    vpermd %zmm6, %zmm3, %zmm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm28, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm7, %ymm6, %ymm11
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm6
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm11, %zmm6
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm11
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %xmm13
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %xmm14
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm14, %xmm14
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm13[0,1],xmm14[2],xmm13[3,4],xmm14[5],xmm13[6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm16
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm4, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm29, %ymm12
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm29[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm9[0,1],ymm14[2],ymm9[3,4],ymm14[5],ymm9[6,7,8,9],ymm14[10],ymm9[11,12],ymm14[13],ymm9[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm31, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm15[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm30[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm23[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm19[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm18[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm5[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0],ymm12[1],ymm5[2,3],ymm12[4],ymm5[5,6,7,8],ymm12[9],ymm5[10,11],ymm12[12],ymm5[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm28, %zmm12
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm10, %zmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm12, %zmm10, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm31, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm1
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm0 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm9 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm11[0,1,2,3],zmm0[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm11 = mem[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm12 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm15 = mem[2,3,3,3,6,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm17 = mem[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm8 = mem[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm26[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm25, %xmm13
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm13 = xmm13[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm24[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm22[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm21[2,2,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm20[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm16[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm16, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm15[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm17, %zmm1
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm18, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm10, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm1
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm1, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm8, %zmm8
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm8
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm8 = zmm10[0,1,2,3],zmm8[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm6
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm6
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm30, %zmm8
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd (%rax), %ymm9
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm10
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm14, %zmm2
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm2 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512DQ-SLOW-NEXT:    # zmm2 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermd (%rax), %zmm2, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm2
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm9, (%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm6, 320(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm1, 256(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm7, 192(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm3, 384(%rax)
-; AVX512DQ-SLOW-NEXT:    addq $632, %rsp # imm = 0x278
-; AVX512DQ-SLOW-NEXT:    vzeroupper
-; AVX512DQ-SLOW-NEXT:    retq
+; AVX512F-SLOW-LABEL: store_i16_stride7_vf32:
+; AVX512F-SLOW:       # %bb.0:
+; AVX512F-SLOW-NEXT:    subq $632, %rsp # imm = 0x278
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm9, %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm16
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm29
+; AVX512F-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm1, %ymm15
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm14, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm17
+; AVX512F-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %ymm10
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm1 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm1, %ymm10, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %ymm11
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm11, %ymm3
+; AVX512F-SLOW-NEXT:    vpor %ymm2, %ymm3, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r9), %xmm3
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r8), %xmm6
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm10[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm6[0],xmm3[0],xmm6[1],xmm3[1],xmm6[2],xmm3[2],xmm6[3],xmm3[3]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm2[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm7
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %xmm8
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm6[4],xmm3[4],xmm6[5],xmm3[5],xmm6[6],xmm3[6],xmm6[7],xmm3[7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm2[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm2, %zmm3, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm6
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm6, %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm14, %ymm3, %ymm4
+; AVX512F-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm12
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm7
+; AVX512F-SLOW-NEXT:    vpshufb %ymm9, %ymm12, %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm4
+; AVX512F-SLOW-NEXT:    vpor %ymm2, %ymm4, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r9), %ymm13
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r8), %ymm14
+; AVX512F-SLOW-NEXT:    vpshufb %ymm1, %ymm13, %ymm1
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm14, %ymm0
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %xmm0
+; AVX512F-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm2 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm2, %xmm8, %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm2, %xmm20
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm0[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm8[0],xmm0[1],xmm8[1],xmm0[2],xmm8[2],xmm0[3],xmm8[3]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm4 = <u,16,u,u,17,17,u,u,0,u,u,1,2,u,u,3>
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm2, %zmm1, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %xmm2
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm4[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm5, %zmm4, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm10, %ymm4
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm5[0,1],ymm4[2],ymm5[3,4],ymm4[5],ymm5[6,7,8,9],ymm4[10],ymm5[11,12],ymm4[13],ymm5[14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm11[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm10[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm9[0,1,2],ymm5[3],ymm9[4,5],ymm5[6],ymm9[7,8,9,10],ymm5[11],ymm9[12,13],ymm5[14],ymm9[15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [2,2,3,3,10,9,11,10]
+; AVX512F-SLOW-NEXT:    vpermi2q %zmm4, %zmm5, %zmm27
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm8[4],xmm0[5],xmm8[5],xmm0[6],xmm8[6],xmm0[7],xmm8[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm0, %xmm31
+; AVX512F-SLOW-NEXT:    vmovdqa (%rax), %ymm4
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm0
+; AVX512F-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm28
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm0
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm30
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm12[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm7[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm23
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm6[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm3[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm19
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm13, %ymm0
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm13[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm14[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm16, %ymm4
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm29[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm5, %ymm18
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm15, %ymm8
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm15[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm17[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm0
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm9, %xmm1
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm15 = xmm0[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm15[0,1],xmm1[2],xmm15[3,4],xmm1[5],xmm15[6,7]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm0[0],xmm9[0],xmm0[1],xmm9[1],xmm0[2],xmm9[2],xmm0[3],xmm9[3]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm9
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm20, %xmm0
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm9, %xmm0
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm15
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm15[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm0 = xmm11[0],xmm0[1],xmm11[2,3],xmm0[4],xmm11[5,6],xmm0[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm0, %ymm26
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm15[0],xmm9[0],xmm15[1],xmm9[1],xmm15[2],xmm9[2],xmm15[3],xmm9[3]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm9 = xmm15[4],xmm9[4],xmm15[5],xmm9[5],xmm15[6],xmm9[6],xmm15[7],xmm9[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm9, %xmm25
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm6[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1],ymm3[2],ymm6[3,4],ymm3[5],ymm6[6,7,8,9],ymm3[10],ymm6[11,12],ymm3[13],ymm6[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm12[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm22
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm14[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm6 = ymm13[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0,1,2],ymm3[3],ymm6[4,5],ymm3[6],ymm6[7,8,9,10],ymm3[11],ymm6[12,13],ymm3[14],ymm6[15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm21
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
+; AVX512F-SLOW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rax), %ymm6
+; AVX512F-SLOW-NEXT:    vpermd %zmm6, %zmm3, %zmm3
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm28, %ymm7
+; AVX512F-SLOW-NEXT:    vpshufb %ymm7, %ymm6, %ymm11
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm6, %ymm6
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm11, %zmm6
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm11
+; AVX512F-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %xmm13
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %xmm14
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm13[0],xmm14[0],xmm13[1],xmm14[1],xmm13[2],xmm14[2],xmm13[3],xmm14[3]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm15 = xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm14, %xmm14
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm13[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm13[0,1],xmm14[2],xmm13[3,4],xmm14[5],xmm13[6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm16
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm4, %ymm2
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm14 = ymm4[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm29, %ymm12
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm29[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm14 = ymm9[0,1],ymm14[2],ymm9[3,4],ymm14[5],ymm9[6,7,8,9],ymm14[10],ymm9[11,12],ymm14[13],ymm9[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm31, %xmm4
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm9 = xmm4[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm15[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm30[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm23[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm19[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm18[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm0[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm5[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1],ymm5[2],ymm4[3,4],ymm5[5],ymm4[6,7,8,9],ymm5[10],ymm4[11,12],ymm5[13],ymm4[14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm12 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm5 = ymm5[0],ymm12[1],ymm5[2,3],ymm12[4],ymm5[5,6,7,8],ymm12[9],ymm5[10,11],ymm12[12],ymm5[13,14,15]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm28, %zmm12
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm10, %zmm7
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm12, %zmm10, %zmm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[0,1,1,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm31, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm1
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm9, %zmm0 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm9 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm10, %zmm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm11[0,1,2,3],zmm0[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm11 = mem[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm12 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpshufd $254, {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm15 = mem[2,3,3,3,6,7,7,7]
+; AVX512F-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm17 = mem[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm8 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm8 = mem[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm26[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm25, %xmm13
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm13 = xmm13[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm24[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm22[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm21[2,2,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm20[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm16[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm16, %zmm0
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm3
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm15[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm17, %zmm1
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm18, %zmm8
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm10, %zmm8
+; AVX512F-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm1
+; AVX512F-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm1, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm8, %zmm8
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm8
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm8 = zmm10[0,1,2,3],zmm8[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm6
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm6
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm30, %zmm8
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
+; AVX512F-SLOW-NEXT:    vpbroadcastd (%rax), %ymm9
+; AVX512F-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm10
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm14, %zmm2
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
+; AVX512F-SLOW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpermd (%rax), %zmm2, %zmm2
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm2
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm2
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm9, (%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm6, 320(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm1, 256(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm7, 192(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm3, 384(%rax)
+; AVX512F-SLOW-NEXT:    addq $632, %rsp # imm = 0x278
+; AVX512F-SLOW-NEXT:    vzeroupper
+; AVX512F-SLOW-NEXT:    retq
 ;
-; AVX512DQ-FAST-LABEL: store_i16_stride7_vf32:
-; AVX512DQ-FAST:       # %bb.0:
-; AVX512DQ-FAST-NEXT:    subq $248, %rsp
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm1, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm1, %ymm10
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm4, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm4, %ymm18
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpor %ymm2, %ymm3, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %ymm11
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm4, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm4, %ymm9
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %ymm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm6, %ymm16
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpor %ymm2, %ymm4, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %ymm13
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm13, %ymm6
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %ymm14
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm4, %ymm14, %ymm7
-; AVX512DQ-FAST-NEXT:    vporq %ymm6, %ymm7, %ymm25
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm11, %ymm6
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %ymm8
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm8, %ymm5
-; AVX512DQ-FAST-NEXT:    vpor %ymm6, %ymm5, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm3, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm3, %ymm29
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %ymm12
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm12, %ymm1
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %ymm15
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %ymm6
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm4, %ymm6, %ymm1
-; AVX512DQ-FAST-NEXT:    vporq %ymm0, %ymm1, %ymm21
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm11, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm8[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm11, %ymm30
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6,7,8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm24 = <u,2,2,3,10,u,11,u>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm1, %zmm2, %zmm24
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm10[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm18[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %xmm10
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %xmm5
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm10[0],xmm5[1],xmm10[1],xmm5[2],xmm10[2],xmm5[3],xmm10[3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,2,2,3,8,u,9,u>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm23
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm16[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %xmm2
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm11, %xmm4, %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [2,1,3,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm20
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm0[0,1],ymm4[2],ymm0[3,4],ymm4[5],ymm0[6,7,8,9],ymm4[10],ymm0[11,12],ymm4[13],ymm0[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [2,2,2,3,8,8,8,9]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm0, %xmm3, %xmm7
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm7, %zmm4, %zmm22
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2],xmm1[3,4],xmm2[5],xmm1[6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,0,u,1,8,8,9,u>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm26
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %xmm9
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm2[0],xmm9[1],xmm2[1],xmm9[2],xmm2[2],xmm9[3],xmm2[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,0,1,8,9,9,11]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm27
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm4[0,1],ymm0[2],ymm4[3,4],ymm0[5],ymm4[6,7,8,9],ymm0[10],ymm4[11,12],ymm0[13],ymm4[14,15]
-; AVX512DQ-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm13, %ymm4
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm14[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1,2],ymm13[3],ymm4[4,5],ymm13[6],ymm4[7,8,9,10],ymm13[11],ymm4[12,13],ymm13[14],ymm4[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm0, %zmm4, %zmm28
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %xmm14
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm2[4],xmm9[5],xmm2[5],xmm9[6],xmm2[6],xmm9[7],xmm2[7]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm25, %zmm0, %zmm4
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm11, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <0,0,1,1,12,13,u,15>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm4, %zmm2, %zmm25
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm2
-; AVX512DQ-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rax), %ymm9
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm9, %ymm4
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm31
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm30, %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm29, %ymm7
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm2 = ymm7[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm15, %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8,9,10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm29 = <0,1,u,3,10,10,11,11>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm2, %zmm21, %zmm29
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rax), %ymm8
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm1
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm21 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512DQ-FAST-NEXT:    # zmm21 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm8, %ymm21, %ymm2
-; AVX512DQ-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm16
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm7, %ymm4
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm7[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm12[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512DQ-FAST-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm4, %ymm13
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm12 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7,8,9],ymm12[10],ymm13[11,12],ymm12[13],ymm13[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [2,2,2,3,8,10,10,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm30, %zmm12
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm13[2],ymm3[3,4],ymm13[5],ymm3[6,7,8,9],ymm13[10],ymm3[11,12],ymm13[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm15, %ymm13
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm13[2],ymm6[3,4],ymm13[5],ymm6[6,7,8,9],ymm13[10],ymm6[11,12],ymm13[13],ymm6[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,1,3,2,10,10,10,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm6
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm15 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm15, %xmm10, %xmm10
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0],xmm10[1],xmm5[2,3],xmm10[4],xmm5[5,6],xmm10[7]
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm3 = xmm3[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,0,1,1,8,8,10,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm3 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm3 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm10 = xmm1[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm11, %xmm1
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm11, %xmm3, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,1,1,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm4, %zmm10
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm14, %xmm3
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm11 = xmm0[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2],xmm11[3,4],xmm3[5],xmm11[6,7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm1, %xmm0, %xmm11
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm15, %xmm0, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %xmm3
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm14 = xmm3[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm2 = xmm14[0],xmm2[1],xmm14[2,3],xmm2[4],xmm14[5,6],xmm2[7]
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm24, %zmm2, %zmm12
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm6
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm12, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm4 = ymm12[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm1[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm7[0,1],ymm4[2],ymm7[3,4],ymm4[5],ymm7[6,7,8,9],ymm4[10],ymm7[11,12],ymm4[13],ymm7[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 (%rax), %zmm3
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm7 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
-; AVX512DQ-FAST-NEXT:    # zmm7 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermi2d %zmm3, %zmm8, %zmm7
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm7
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm6
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm8
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm6, %zmm6
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm26, %zmm8, %zmm5
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm6
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm0
-; AVX512DQ-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm10
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm14[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm19[0,0,1,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm18[2,2,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm17[2,1,3,2]
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm5
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm5
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm15[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm14 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm14[0],ymm0[1],ymm14[2,3],ymm0[4],ymm14[5,6,7,8],ymm0[9],ymm14[10,11],ymm0[12],ymm14[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm15 = ymm1[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7,8,9],ymm15[10],ymm14[11,12],ymm15[13],ymm14[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm30, %zmm14
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm14
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <6,u,u,u,7,u,u,7>
-; AVX512DQ-FAST-NEXT:    vpermd %ymm9, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm9, %zmm3, %zmm3
-; AVX512DQ-FAST-NEXT:    vpermd %zmm3, %zmm21, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm14, %zmm3
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm4 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm9 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm8, %zmm9
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm23, %zmm2, %zmm20
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm31
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm31
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm12, %zmm2, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm13, %zmm4, %zmm4
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm29, %zmm16
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm16
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm22, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm0
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm3, 128(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm5, (%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm6, 256(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm0, 192(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm7, 384(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm31, 64(%rax)
-; AVX512DQ-FAST-NEXT:    addq $248, %rsp
-; AVX512DQ-FAST-NEXT:    vzeroupper
-; AVX512DQ-FAST-NEXT:    retq
+; AVX512F-FAST-LABEL: store_i16_stride7_vf32:
+; AVX512F-FAST:       # %bb.0:
+; AVX512F-FAST-NEXT:    subq $248, %rsp
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm1, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa %ymm1, %ymm10
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm4, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm4, %ymm18
+; AVX512F-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpor %ymm2, %ymm3, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %ymm11
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm4, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa %ymm4, %ymm9
+; AVX512F-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %ymm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm6, %ymm16
+; AVX512F-FAST-NEXT:    vmovdqu %ymm6, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpor %ymm2, %ymm4, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %ymm13
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm13, %ymm6
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %ymm14
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm4, %ymm14, %ymm7
+; AVX512F-FAST-NEXT:    vporq %ymm6, %ymm7, %ymm25
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm11, %ymm6
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %ymm8
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm8, %ymm5
+; AVX512F-FAST-NEXT:    vpor %ymm6, %ymm5, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %ymm3
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm3, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm3, %ymm29
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %ymm12
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm12, %ymm1
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %ymm15
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %ymm6
+; AVX512F-FAST-NEXT:    vpshufb %ymm4, %ymm6, %ymm1
+; AVX512F-FAST-NEXT:    vporq %ymm0, %ymm1, %ymm21
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm11, %ymm1
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm8[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm11, %ymm30
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0],ymm2[1],ymm4[2,3],ymm2[4],ymm4[5,6,7,8],ymm2[9],ymm4[10,11],ymm2[12],ymm4[13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm24 = <u,2,2,3,10,u,11,u>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm1, %zmm2, %zmm24
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm10[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm18[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %xmm10
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %xmm5
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm5[0],xmm10[0],xmm5[1],xmm10[1],xmm5[2],xmm10[2],xmm5[3],xmm10[3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm2 = xmm2[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,2,2,3,8,u,9,u>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm23
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm16[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %xmm2
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm11, %xmm4, %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [2,1,3,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm20
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm0[0,1],ymm4[2],ymm0[3,4],ymm4[5],ymm0[6,7,8,9],ymm4[10],ymm0[11,12],ymm4[13],ymm0[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [2,2,2,3,8,8,8,9]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
+; AVX512F-FAST-NEXT:    vmovdqa %xmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpshufb %xmm0, %xmm3, %xmm7
+; AVX512F-FAST-NEXT:    vpermi2q %zmm7, %zmm4, %zmm22
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-FAST-NEXT:    vprold $16, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm1 = xmm1[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm1 = xmm1[0,1],xmm2[2],xmm1[3,4],xmm2[5],xmm1[6,7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
+; AVX512F-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,0,u,1,8,8,9,u>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm2, %zmm1, %zmm26
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %xmm9
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm4 = xmm9[0],xmm2[0],xmm9[1],xmm2[1],xmm9[2],xmm2[2],xmm9[3],xmm2[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm4 = xmm4[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,0,1,8,9,9,11]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm4, %zmm0, %zmm27
+; AVX512F-FAST-NEXT:    vprold $16, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm14[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm4[0,1],ymm0[2],ymm4[3,4],ymm0[5],ymm4[6,7,8,9],ymm0[10],ymm4[11,12],ymm0[13],ymm4[14,15]
+; AVX512F-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm3 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm13, %ymm4
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm14[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm4[0,1,2],ymm13[3],ymm4[4,5],ymm13[6],ymm4[7,8,9,10],ymm13[11],ymm4[12,13],ymm13[14],ymm4[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [2,2,3,3,10,9,11,10]
+; AVX512F-FAST-NEXT:    vpermi2q %zmm0, %zmm4, %zmm28
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %xmm14
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm14[4],xmm0[4],xmm14[5],xmm0[5],xmm14[6],xmm0[6],xmm14[7],xmm0[7]
+; AVX512F-FAST-NEXT:    vpshufb %xmm7, %xmm4, %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm19
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm2[4],xmm9[5],xmm2[5],xmm9[6],xmm2[6],xmm9[7],xmm2[7]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm25, %zmm0, %zmm4
+; AVX512F-FAST-NEXT:    vpshufb %xmm11, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <0,0,1,1,12,13,u,15>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm4, %zmm2, %zmm25
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm2
+; AVX512F-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa (%rax), %ymm9
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm9, %ymm4
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm31
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm30, %ymm2
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm8[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm4[2],ymm2[3,4],ymm4[5],ymm2[6,7,8,9],ymm4[10],ymm2[11,12],ymm4[13],ymm2[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm2, %ymm18
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm29, %ymm7
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm2 = ymm7[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm12[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm4[0,1],ymm2[2],ymm4[3,4],ymm2[5],ymm4[6,7,8,9],ymm2[10],ymm4[11,12],ymm2[13],ymm4[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm15, %ymm2
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm6[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm3[3],ymm2[4,5],ymm3[6],ymm2[7,8,9,10],ymm3[11],ymm2[12,13],ymm3[14],ymm2[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm29 = <0,1,u,3,10,10,11,11>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm2, %zmm21, %zmm29
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rax), %ymm8
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm1
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
+; AVX512F-FAST-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermd %ymm8, %ymm21, %ymm2
+; AVX512F-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm16
+; AVX512F-FAST-NEXT:    vmovdqa %ymm7, %ymm4
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm7[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm12[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0],ymm2[1],ymm3[2,3],ymm2[4],ymm3[5,6,7,8],ymm2[9],ymm3[10,11],ymm2[12],ymm3[13,14,15]
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
+; AVX512F-FAST-NEXT:    # ymm7 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm4, %ymm13
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm12 = ymm13[0,1],ymm12[2],ymm13[3,4],ymm12[5],ymm13[6,7,8,9],ymm12[10],ymm13[11,12],ymm12[13],ymm13[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [2,2,2,3,8,10,10,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm30, %zmm12
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm13 = ymm6[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm13[2],ymm3[3,4],ymm13[5],ymm3[6,7,8,9],ymm13[10],ymm3[11,12],ymm13[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vprold $16, %ymm15, %ymm13
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm6[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm6[0,1],ymm13[2],ymm6[3,4],ymm13[5],ymm6[6,7,8,9],ymm13[10],ymm6[11,12],ymm13[13],ymm6[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm13 = [2,1,3,2,10,10,10,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm6
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm5[4],xmm10[4],xmm5[5],xmm10[5],xmm5[6],xmm10[6],xmm5[7],xmm10[7]
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm15 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
+; AVX512F-FAST-NEXT:    vpshufb %xmm15, %xmm10, %xmm10
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0],xmm10[1],xmm5[2,3],xmm10[4],xmm5[5,6],xmm10[7]
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm3 = xmm3[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [0,0,1,1,8,8,10,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm5
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm3 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm3 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm10 = xmm1[0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
+; AVX512F-FAST-NEXT:    vmovdqa %xmm11, %xmm1
+; AVX512F-FAST-NEXT:    vpshufb %xmm11, %xmm3, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,1,1,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm4, %zmm10
+; AVX512F-FAST-NEXT:    vprold $16, %xmm14, %xmm3
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm11 = xmm0[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm11[0,1],xmm3[2],xmm11[3,4],xmm3[5],xmm11[6,7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm14[0],xmm0[1],xmm14[1],xmm0[2],xmm14[2],xmm0[3],xmm14[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm1, %xmm0, %xmm11
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm2, %zmm11
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %xmm0
+; AVX512F-FAST-NEXT:    vpshufb %xmm15, %xmm0, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %xmm3
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm14 = xmm3[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm2 = xmm14[0],xmm2[1],xmm14[2,3],xmm2[4],xmm14[5,6],xmm2[7]
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm3[4],xmm0[4],xmm3[5],xmm0[5],xmm3[6],xmm0[6],xmm3[7],xmm0[7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm3[0],xmm0[0],xmm3[1],xmm0[1],xmm3[2],xmm0[2],xmm3[3],xmm0[3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm24, %zmm2, %zmm12
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm12, %zmm6
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm12, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm4 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm4[2],ymm3[3,4],ymm4[5],ymm3[6,7,8,9],ymm4[10],ymm3[11,12],ymm4[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm4 = ymm12[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm1[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm4 = ymm7[0,1],ymm4[2],ymm7[3,4],ymm4[5],ymm7[6,7,8,9],ymm4[10],ymm7[11,12],ymm4[13],ymm7[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm13, %zmm4
+; AVX512F-FAST-NEXT:    vmovdqa64 (%rax), %zmm3
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm7 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
+; AVX512F-FAST-NEXT:    # zmm7 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermi2d %zmm3, %zmm8, %zmm7
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm7
+; AVX512F-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm6
+; AVX512F-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm8
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm6, %zmm6
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm6
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm26, %zmm8, %zmm5
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm6
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm0
+; AVX512F-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
+; AVX512F-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm10
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm10, %zmm5, %zmm5
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm14[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm19[0,0,1,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm18[2,2,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm17[2,1,3,2]
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm27, %zmm5
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm5
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm15 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm15[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm14 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm14[0],ymm0[1],ymm14[2,3],ymm0[4],ymm14[5,6,7,8],ymm0[9],ymm14[10,11],ymm0[12],ymm14[13,14,15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm14 = ymm15[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm15 = ymm1[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm14 = ymm14[0,1],ymm15[2],ymm14[3,4],ymm15[5],ymm14[6,7,8,9],ymm15[10],ymm14[11,12],ymm15[13],ymm14[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm30, %zmm14
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm14
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <6,u,u,u,7,u,u,7>
+; AVX512F-FAST-NEXT:    vpermd %ymm9, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm9, %zmm3, %zmm3
+; AVX512F-FAST-NEXT:    vpermd %zmm3, %zmm21, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm28, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm14, %zmm3
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm4 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm11, %zmm9 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm8, %zmm9
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm23, %zmm2, %zmm20
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm25, %zmm31
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm31
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm12, %zmm2, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm13, %zmm4, %zmm4
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm4
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm29, %zmm16
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm16
+; AVX512F-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm22, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm20, %zmm0
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm3, 128(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm5, (%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm6, 256(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm0, 192(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm7, 384(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm31, 64(%rax)
+; AVX512F-FAST-NEXT:    addq $248, %rsp
+; AVX512F-FAST-NEXT:    vzeroupper
+; AVX512F-FAST-NEXT:    retq
 ;
 ; AVX512BW-LABEL: store_i16_stride7_vf32:
 ; AVX512BW:       # %bb.0:
@@ -11576,2699 +10687,1352 @@ define void @store_i16_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-FAST-PERLANE-NEXT:    vzeroupper
 ; AVX2-FAST-PERLANE-NEXT:    retq
 ;
-; AVX512F-ONLY-SLOW-LABEL: store_i16_stride7_vf64:
-; AVX512F-ONLY-SLOW:       # %bb.0:
-; AVX512F-ONLY-SLOW-NEXT:    subq $2200, %rsp # imm = 0x898
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rcx), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rdx), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rdi), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rsi), %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm19
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm9, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm16
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm8, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm17
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%r9), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm3, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm3, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%r8), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm15, %ymm3, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rcx), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rdx), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm6, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rsi), %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rdi), %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm4, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm10, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm15, %ymm2, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm1, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm2, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm23
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm10, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm10, %ymm25
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm11, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm0, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm11, %ymm13
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm12, %ymm13, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r8), %ymm13
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm15, %ymm13, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r9), %ymm15
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm3, %ymm15, %ymm14
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm14, %ymm12, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm15, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm13[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm15[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm14[0,1],ymm10[2],ymm14[3,4],ymm10[5],ymm14[6,7,8,9],ymm10[10],ymm14[11,12],ymm10[13],ymm14[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [2,1,3,2,10,10,10,11]
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2q %zmm10, %zmm12, %zmm14
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512F-ONLY-SLOW-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm14
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm12, %ymm14, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm12, %ymm19
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm14[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm14[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm10[2],ymm9[3,4],ymm10[5],ymm9[6,7,8,9],ymm10[10],ymm9[11,12],ymm10[13],ymm9[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm7 = ymm8[0,1],ymm7[2],ymm8[3,4],ymm7[5],ymm8[6,7,8,9],ymm7[10],ymm8[11,12],ymm7[13],ymm8[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm16, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm17, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%r8), %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[16,17,u,u,u,u],zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm7, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%r9), %ymm14
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm3, %ymm14, %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, %ymm10, %ymm8, %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vextracti64x4 $1, %zmm7, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm12[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %ymm7, %ymm10, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm14, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %ymm8, %ymm10, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm9[0,1,2,3],zmm7[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm8 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rax), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermd %zmm7, %zmm8, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpandnq %ymm9, %ymm17, %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 72(%rax), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpandn %ymm7, %ymm14, %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rax), %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vpandn %ymm9, %ymm14, %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rax), %ymm14
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm14, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm14, %ymm22
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm12[0,1],ymm9[2],ymm12[3,4],ymm9[5],ymm12[6,7,8,9],ymm9[10],ymm12[11,12],ymm9[13],ymm12[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm0[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm11[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm12[2],ymm9[3,4],ymm12[5],ymm9[6,7,8,9],ymm12[10],ymm9[11,12],ymm12[13],ymm9[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rax), %ymm9
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm9, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpandnq %ymm12, %ymm17, %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm10, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm13
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm10
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm31
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm0[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm11[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpermd %zmm9, %zmm8, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm20, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm24[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm24[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2q %zmm0, %zmm10, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512F-ONLY-SLOW-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermd 64(%rax), %zmm11, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm17, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm3[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6,7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm24[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm8[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%r9), %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%r8), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm3[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm4, %zmm12, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm7[2,3,3,3,6,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 96(%rax), %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm5, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rsi), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rdi), %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm2, %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm18
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rcx), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 96(%rdx), %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm6 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm6, %xmm2, %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm6, %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm2, %zmm28, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 100(%rax), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 104(%rax), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm16, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rcx), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rdx), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm3, %xmm30
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm7, %xmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <16,16,17,17,17,17,u,u,0,1,0,1,2,3,2,3>
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm2, %zmm3, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rdi), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%rsi), %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm5, %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm6, %xmm26
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm4, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm4[2],xmm2[3,4],xmm4[5],xmm2[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm5, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%r9), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 64(%r8), %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm16
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm6 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm4, %zmm6, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 64(%rax), %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 68(%rax), %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm1[4],xmm5[5],xmm1[5],xmm5[6],xmm1[6],xmm5[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm4, %xmm19
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm8, %xmm1, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm8, %xmm29
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3],xmm1[4],xmm5[5,6],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm7[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm5, %zmm3, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm7, %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm5, %xmm5
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm5[2],xmm3[3,4],xmm5[5],xmm3[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm2, %zmm6, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd (%rax), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm23, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm7[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm20
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm7[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm27
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %ymm3, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm5[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm2 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm5[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm6[3],ymm2[4,5],ymm6[6],ymm2[7,8,9,10],ymm6[11],ymm2[12,13],ymm6[14],ymm2[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2q %zmm1, %zmm10, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7,8,9],ymm1[10],ymm6[11,12],ymm1[13],ymm6[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm24
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %ymm13, %ymm8, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm6[2],ymm1[3,4],ymm6[5],ymm1[6,7,8,9],ymm6[10],ymm1[11,12],ymm6[13],ymm1[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm21
-; AVX512F-ONLY-SLOW-NEXT:    vpermd (%rax), %zmm11, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm17, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm10[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm25[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm5[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,3,6,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r9), %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%r8), %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm11 = xmm6[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm11, %zmm12, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm22[2,3,3,3,6,7,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm12
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm25
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm15, %zmm25
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm12
-; AVX512F-ONLY-SLOW-NEXT:    vprold $16, %xmm12, %xmm15
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm1[1,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm13[0,1],xmm15[2],xmm13[3,4],xmm15[5],xmm13[6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm12[4],xmm1[4],xmm12[5],xmm1[5],xmm12[6],xmm1[6],xmm12[7],xmm1[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm16, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm18, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm13, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm2, %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm23 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm22 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm22 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm18 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm18 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm17 = mem[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,5,7,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpermt2d %zmm0, %zmm28, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm28
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm28
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm6
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm6, %xmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm0[1,1,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm7[0],xmm2[1],xmm7[2,3],xmm2[4],xmm7[5,6],xmm2[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm0[3,3,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm2 = mem[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm4[2,1,2,3,6,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm10[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm10[2,2,2,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm30, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm26, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm8 = xmm4[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %xmm19, %xmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm14[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm4 = mem[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm5 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $250, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm6 = mem[2,2,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermpd $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovups %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm31[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm30 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm30 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm26 = mem[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm19 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm11 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm16 = mem[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm27[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm24[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm21[2,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm23, %zmm22
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm23
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm22, %zmm27, %zmm23
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %ymm23, %ymm17, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm17, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm1[0,1,2,3],zmm2[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm1[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm3 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm8 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm10, %zmm22, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm17, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm2 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm10 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm22, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm3, %zmm17, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm18
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm18
-; AVX512F-ONLY-SLOW-NEXT:    vextracti64x4 $1, %zmm23, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm1, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm3, %ymm29
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm21 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm1[0,1,2,3],zmm2[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm24 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm2[4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm23
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm29 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm19, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm16, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm20, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm14, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm0, %zmm19
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm4, %zmm0, %zmm20
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm0, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm26, %zmm30, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm27, %zmm2
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm0 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm3 = mem[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm3 = mem[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm4 = mem[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm3, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm4 = mem[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm5 = mem[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm26 = mem[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm7 = mem[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm9 = mem[0,2,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm11 = mem[0,1,3,2,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm12 = mem[2,1,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm13 = mem[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm14 = mem[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm15 = mem[2,1,2,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # ymm16 = mem[0,0,1,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
-; AVX512F-ONLY-SLOW-NEXT:    # xmm1 = mem[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm11[0,1,1,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm9, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm12, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm11
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm9, %zmm0, %zmm25
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm26, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm3
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm14, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm16, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm4
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm0, %zmm28
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm24
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm24, 320(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm28, 256(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm25, 192(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm10, 64(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm17, 448(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm4, 704(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm11, 640(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm19, 576(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm8, 512(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm0, 384(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm21, 768(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm23, 832(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    addq $2200, %rsp # imm = 0x898
-; AVX512F-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512F-ONLY-SLOW-NEXT:    retq
-;
-; AVX512F-ONLY-FAST-LABEL: store_i16_stride7_vf64:
-; AVX512F-ONLY-FAST:       # %bb.0:
-; AVX512F-ONLY-FAST-NEXT:    subq $1496, %rsp # imm = 0x5D8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rcx), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm15 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm0, %ymm20
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rdx), %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm0, %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm16
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rsi), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm6, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rdi), %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm7, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm17
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%r9), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%r8), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm2, %ymm4, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, (%rsp) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rdx), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm8, %ymm12, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rsi), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rdi), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm28
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm0, %ymm13, %ymm19
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm10, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm10, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %ymm10
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm10, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %ymm15
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm8, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm4, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm11, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm14, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm3, %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm5, %ymm0, %ymm21
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm20, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm0 = ymm2[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm11 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm11[0,1],ymm3[2],ymm11[3,4],ymm3[5],ymm11[6,7,8,9],ymm3[10],ymm11[11,12],ymm3[13],ymm11[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm16, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm17, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%r8), %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[16,17,u,u,u,u],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %ymm25, %ymm3, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%r9), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %ymm12, %ymm0, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,4,u,u,u,5,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm11, %ymm3, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %ymm0, %ymm17, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm17, %zmm22
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm5, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm5, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [0,1,4,5,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rax), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpandnq %ymm1, %ymm25, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm20
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm3, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm6, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm3, %ymm26
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5],ymm1[6],ymm3[7,8,9,10],ymm1[11],ymm3[12,13],ymm1[14],ymm3[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm29
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3],ymm3[4],ymm6[5,6,7,8],ymm3[9],ymm6[10,11],ymm3[12],ymm6[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm6 = [0,2,2,3,10,9,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm6, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm7, %ymm27
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm9[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm7[0],ymm1[1],ymm7[2,3],ymm1[4],ymm7[5,6,7,8],ymm1[9],ymm7[10,11],ymm1[12],ymm7[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512F-ONLY-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm2, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm16
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm9[2],ymm7[3,4],ymm9[5],ymm7[6,7,8,9],ymm9[10],ymm7[11,12],ymm9[13],ymm7[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [2,2,2,3,8,10,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm17, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm1, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm1, %zmm24
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <5,u,u,u,6,u,u,6>
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm11, %ymm1, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 64(%rax), %zmm12
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
-; AVX512F-ONLY-FAST-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermi2d %zmm12, %zmm0, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 72(%rax), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rax), %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm20, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm7, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm7, %ymm30
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm11
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%r9), %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%r8), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm2, %xmm20
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [0,0,1,1,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rax), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm7 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm14, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7,8,9,10],ymm3[11],ymm0[12,13],ymm3[14],ymm0[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <0,1,u,3,10,10,11,11>
-; AVX512F-ONLY-FAST-NEXT:    vpermi2q %zmm0, %zmm21, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm26, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm9, %ymm8, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5],ymm0[6],ymm3[7,8,9,10],ymm0[11],ymm3[12,13],ymm0[14],ymm3[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm29, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6,7,8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm6, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm27, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm10, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm15[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2,3],ymm0[4],ymm5[5,6,7,8],ymm0[9],ymm5[10,11],ymm0[12],ymm5[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm16, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm4, %ymm10, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm15[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm24, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm24, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512F-ONLY-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm14, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7,8,9],ymm3[10],ymm0[11,12],ymm3[13],ymm0[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm14, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm13[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7,8,9],ymm3[10],ymm5[11,12],ymm3[13],ymm5[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [2,1,3,2,10,10,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 (%rax), %zmm19
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [14,21,0,0,15,22,0,15,14,21,0,0,15,22,0,15]
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rax), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpermi2d %zmm0, %zmm19, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm28[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm28[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm5[2],ymm3[3,4],ymm5[5],ymm3[6,7,8,9],ymm5[10],ymm3[11,12],ymm5[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm4, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7,8,9],ymm5[10],ymm0[11,12],ymm5[13],ymm0[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm1[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm2, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm8, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm2[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm8, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm2, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm6[3],ymm3[4,5],ymm6[6],ymm3[7,8,9,10],ymm6[11],ymm3[12,13],ymm6[14],ymm3[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm2, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm30, %zmm12, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm25, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rcx), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rdx), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm2, %xmm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,2,2,3,8,9,9,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm31, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm9, %ymm13, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm28[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8,9,10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rsi), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%rdi), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm4, %xmm26
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm3, %xmm24
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [2,1,3,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm2, %zmm3, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm16, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm15, %ymm8, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rcx), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rdx), %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm4 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm4, %xmm2, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm6 = xmm1[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm7, %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [0,1,1,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm5, %zmm1, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm1, %zmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, (%rsp) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 64(%rsi), %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm6, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm4[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3,4],xmm1[5],xmm5[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm9, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,1,1,8,8,10,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm5, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm20, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm10, %xmm2, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [0,0,0,1,8,9,9,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm6, %zmm16, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm8, %xmm4, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm9[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm8[0],xmm6[1],xmm8[2,3],xmm6[4],xmm8[5,6],xmm6[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm14, %xmm8, %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm14, %xmm21
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm6, %zmm11, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %xmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm11, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm4[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm9[0,1],xmm6[2],xmm9[3,4],xmm6[5],xmm9[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm11[0],xmm4[1],xmm11[1],xmm4[2],xmm11[2],xmm4[3],xmm11[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm3, %xmm28
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm6, %zmm27, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm8, %zmm5, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 64(%rax), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 68(%rax), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm23
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm23
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm2, %zmm23
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm10, %xmm22
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm7, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm5, %zmm16, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm4, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm9, %zmm2, %zmm20
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,10,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm5[2],ymm1[3,4],ymm5[5],ymm1[6,7,8,9],ymm5[10],ymm1[11,12],ymm5[13],ymm1[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm9[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm7, %ymm15
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm8 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm18, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %ymm5, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm16[1,2,2,3,5,6,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm16[0,0,2,1,4,4,6,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5],ymm5[6],ymm2[7,8,9,10],ymm5[11],ymm2[12,13],ymm5[14],ymm2[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,3,3,10,9,11,10]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,8,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%r9), %xmm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 96(%r8), %xmm7
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm11[0],xmm7[1],xmm11[1],xmm7[2],xmm11[2],xmm7[3],xmm11[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm19, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512F-ONLY-FAST-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %zmm1, %zmm5, %zmm19
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm25, %zmm19
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm19
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %xmm12
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm12[0],xmm8[1],xmm12[1],xmm8[2],xmm12[2],xmm8[3],xmm12[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm9[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm15[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm9[0],ymm2[1],ymm9[2,3],ymm2[4],ymm9[5,6,7,8],ymm2[9],ymm9[10,11],ymm2[12],ymm9[13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm1, %zmm31, %zmm15
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm31 = [6,7,3,3,7,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm30, %ymm31, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 96(%rax), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm21
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm13, %zmm30, %zmm21
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm24, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm26, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm26, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm2 = xmm24[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm2[0,1],xmm1[2],xmm2[3,4],xmm1[5],xmm2[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm28, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm6, %xmm13, %xmm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,1,3,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm13, %zmm2, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,2,1,8,8,9,11]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm24 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm24, %xmm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm9[0],xmm0[1],xmm9[2,3],xmm0[4],xmm9[5,6],xmm0[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm2, %zmm27, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm25, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm30, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm6, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm22, %xmm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm13, %xmm10, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,1,1,3,8,8,9,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm2, %zmm28, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm6[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm26 = ymm2[3,3,3,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm29 = ymm2[2,2,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 100(%rax), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 104(%rax), %ymm6
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm2, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm16[3,3,3,3,7,7,7,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm10 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm15[0],xmm3[1],xmm15[1],xmm3[2],xmm15[2],xmm3[3],xmm15[3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm9 = xmm6[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [2,2,2,3,8,8,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm9, %zmm16, %zmm10
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm14, %ymm31, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm9, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm10, %zmm30, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
-; AVX512F-ONLY-FAST-NEXT:    vprold $16, %xmm4, %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2],xmm5[3,4],xmm4[5],xmm5[6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm10, %xmm0, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm24, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm0, %xmm12, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm12[4],xmm8[5],xmm12[5],xmm8[6],xmm12[6],xmm8[7],xmm12[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm8[0],xmm0[1],xmm8[2,3],xmm0[4],xmm8[5,6],xmm0[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm15[4],xmm3[5],xmm15[5],xmm3[6],xmm15[6],xmm3[7],xmm15[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm13, %xmm6, %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vpermt2q %zmm3, %zmm28, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm5, %zmm3, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm4, %zmm22, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %ymm7, %ymm0, %ymm29
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm0, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %ymm11, %ymm0, %ymm26
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm26, %zmm0, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm29[0,1,2,3],zmm1[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm12
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd (%rsp), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm10, %xmm5, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm6 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # ymm7 = mem[0,1,1,3,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512F-ONLY-FAST-NEXT:    vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # ymm8 = mem[1,1,1,1,5,5,5,5]
-; AVX512F-ONLY-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm10, %ymm8, %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[16,17],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpandn %ymm8, %ymm11, %ymm8
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm10, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm10 = mem[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm13
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm10, %zmm25, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm4 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm5 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm1 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm5 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm4, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm6, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm8, 320(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm3, 256(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm9, 192(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm19, 128(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm20, (%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm23, 448(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm2, 704(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm21, 640(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovaps %zmm0, 576(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm13, 384(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm5, 64(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm1, 512(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm12, 832(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm4, 768(%rax)
-; AVX512F-ONLY-FAST-NEXT:    addq $1496, %rsp # imm = 0x5D8
-; AVX512F-ONLY-FAST-NEXT:    vzeroupper
-; AVX512F-ONLY-FAST-NEXT:    retq
-;
-; AVX512DQ-SLOW-LABEL: store_i16_stride7_vf64:
-; AVX512DQ-SLOW:       # %bb.0:
-; AVX512DQ-SLOW-NEXT:    subq $2200, %rsp # imm = 0x898
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rcx), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rdx), %ymm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rdi), %ymm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rsi), %ymm8
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm19
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm11, %ymm9, %ymm2
-; AVX512DQ-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm16
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm8, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm2
-; AVX512DQ-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm17
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%r9), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm3, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm3, %ymm10
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%r8), %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm15, %ymm3, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rcx), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rdx), %ymm6
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm11, %ymm6, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rsi), %ymm5
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rdi), %ymm4
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm4, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm10, %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm15, %ymm2, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm1, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm11, %ymm2, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm23
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %ymm10
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm10, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm10, %ymm25
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm1
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
-; AVX512DQ-SLOW-NEXT:    vpor %ymm0, %ymm11, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm0
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm0, %ymm12
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm11
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm11, %ymm13
-; AVX512DQ-SLOW-NEXT:    vpor %ymm12, %ymm13, %ymm10
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r8), %ymm13
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm15, %ymm13, %ymm12
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r9), %ymm15
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm3, %ymm15, %ymm14
-; AVX512DQ-SLOW-NEXT:    vpor %ymm14, %ymm12, %ymm10
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm15, %ymm12
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm13[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm15[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm14[0,1],ymm10[2],ymm14[3,4],ymm10[5],ymm14[6,7,8,9],ymm10[10],ymm14[11,12],ymm10[13],ymm14[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [2,1,3,2,10,10,10,11]
-; AVX512DQ-SLOW-NEXT:    vpermi2q %zmm10, %zmm12, %zmm14
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512DQ-SLOW-NEXT:    # ymm12 = mem[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm14
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm12, %ymm14, %ymm10
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm12, %ymm19
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm14[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm14[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm10[2],ymm9[3,4],ymm10[5],ymm9[6,7,8,9],ymm10[10],ymm9[11,12],ymm10[13],ymm9[14,15]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm7 = ymm8[0,1],ymm7[2],ymm8[3,4],ymm7[5],ymm8[6,7,8,9],ymm7[10],ymm8[11,12],ymm7[13],ymm8[14,15]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm16, %zmm8
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm17, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%r8), %ymm12
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[16,17,u,u,u,u],zero,zero
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm7, %ymm8
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%r9), %ymm14
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm3, %ymm14, %ymm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, %ymm10, %ymm8, %ymm9
-; AVX512DQ-SLOW-NEXT:    vextracti64x4 $1, %zmm7, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm12[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %ymm7, %ymm10, %ymm8
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm14, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,2]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %ymm8, %ymm10, %ymm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm10, %zmm16
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm9[0,1,2,3],zmm7[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm8 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
-; AVX512DQ-SLOW-NEXT:    # zmm8 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rax), %ymm7
-; AVX512DQ-SLOW-NEXT:    vpermd %zmm7, %zmm8, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpandnq %ymm9, %ymm17, %ymm9
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 72(%rax), %ymm7
-; AVX512DQ-SLOW-NEXT:    vpandn %ymm7, %ymm14, %ymm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rax), %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm12
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm9
-; AVX512DQ-SLOW-NEXT:    vpandn %ymm9, %ymm14, %ymm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rax), %ymm14
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm10, %ymm14, %ymm12
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm14, %ymm22
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm12[0,1],ymm9[2],ymm12[3,4],ymm9[5],ymm12[6,7,8,9],ymm9[10],ymm12[11,12],ymm9[13],ymm12[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm0[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm11[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm12[2],ymm9[3,4],ymm12[5],ymm9[6,7,8,9],ymm12[10],ymm9[11,12],ymm12[13],ymm9[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rax), %ymm9
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm10, %ymm9, %ymm10
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpandnq %ymm12, %ymm17, %ymm12
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm10, %zmm10
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm13
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm10
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm31
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm0[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm11[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpermd %zmm9, %zmm8, %zmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm8
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm20, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm24[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm24[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-SLOW-NEXT:    vpermt2q %zmm0, %zmm10, %zmm1
-; AVX512DQ-SLOW-NEXT:    vbroadcasti32x8 {{.*#+}} zmm11 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
-; AVX512DQ-SLOW-NEXT:    # zmm11 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermd 64(%rax), %zmm11, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm17, %zmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm6[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm3[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6,7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm24[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm8[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%r9), %xmm0
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%r8), %xmm1
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm3[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm4, %zmm12, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm7[2,3,3,3,6,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 96(%rax), %ymm5
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm5, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rsi), %xmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rdi), %xmm4
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm2, %xmm5
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm18
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rcx), %xmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa 96(%rdx), %xmm4
-; AVX512DQ-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm6 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm6, %xmm2, %xmm5
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm6, %xmm7
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm2, %zmm28, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 100(%rax), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 104(%rax), %ymm3
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm16, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rcx), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rdx), %xmm2
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm3, %xmm30
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm7, %xmm8
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <16,16,17,17,17,17,u,u,0,1,0,1,2,3,2,3>
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm2, %zmm3, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rdi), %xmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%rsi), %xmm4
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm5, %xmm5
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm6, %xmm26
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm4, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm4[2],xmm2[3,4],xmm4[5],xmm2[6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm5, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%r9), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa 64(%r8), %xmm4
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm16
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm6 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm4, %zmm6, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 64(%rax), %ymm4
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 68(%rax), %ymm5
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm1[4],xmm5[5],xmm1[5],xmm5[6],xmm1[6],xmm5[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm4, %xmm19
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm8, %xmm1, %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm8, %xmm29
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3],xmm1[4],xmm5[5,6],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm7[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm5, %zmm3, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %xmm5
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm7, %xmm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm5, %xmm5
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm5[2],xmm3[3,4],xmm5[5],xmm3[6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm3
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm2, %zmm6, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd (%rax), %ymm2
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm3
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm23, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm7[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[1,1,1,1,5,5,5,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm20
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm7[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm27
-; AVX512DQ-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
-; AVX512DQ-SLOW-NEXT:    vprold $16, %ymm3, %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm5[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm2 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm5[0,0,2,1,4,4,6,5]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm6[3],ymm2[4,5],ymm6[6],ymm2[7,8,9,10],ymm6[11],ymm2[12,13],ymm6[14],ymm2[15]
-; AVX512DQ-SLOW-NEXT:    vpermt2q %zmm1, %zmm10, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
-; AVX512DQ-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[0,1,1,3,4,5,5,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7,8,9],ymm1[10],ymm6[11,12],ymm1[13],ymm6[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm24
-; AVX512DQ-SLOW-NEXT:    vpshufb %ymm13, %ymm8, %ymm1
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm6[2],ymm1[3,4],ymm6[5],ymm1[6,7,8,9],ymm6[10],ymm1[11,12],ymm6[13],ymm1[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm21
-; AVX512DQ-SLOW-NEXT:    vpermd (%rax), %zmm11, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm17, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm10[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm25[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm5[3,3,3,3,7,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,3,6,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r9), %xmm7
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%r8), %xmm2
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm11 = xmm6[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm11, %zmm12, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm22[2,3,3,3,6,7,7,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm12
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm25
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm15, %zmm25
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm12
-; AVX512DQ-SLOW-NEXT:    vprold $16, %xmm12, %xmm15
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm1[1,1,2,3]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm13[0,1],xmm15[2],xmm13[3,4],xmm15[5],xmm13[6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm12[4],xmm1[4],xmm12[5],xmm1[5],xmm12[6],xmm1[6],xmm12[7],xmm1[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm16, %xmm1
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm18, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm13, %xmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm2, %xmm2
-; AVX512DQ-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm23 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm22 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm22 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm18 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm18 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm17 = mem[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,5,7,6]
-; AVX512DQ-SLOW-NEXT:    vpermt2d %zmm0, %zmm28, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm0
-; AVX512DQ-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm6
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm28
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm28
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm6
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512DQ-SLOW-NEXT:    vpshufb %xmm0, %xmm6, %xmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm0
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm0[1,1,2,2]
-; AVX512DQ-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm7[0],xmm2[1],xmm7[2,3],xmm2[4],xmm7[5,6],xmm2[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
-; AVX512DQ-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm0[3,3,3,3]
-; AVX512DQ-SLOW-NEXT:    vpshufd $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm2 = mem[1,2,2,3,5,6,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm4[2,1,2,3,6,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm10[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm10[2,2,2,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm30, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm26, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm8 = xmm4[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %xmm19, %xmm4
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm14[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm4 = mem[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm5 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $250, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm6 = mem[2,2,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermpd $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vmovups %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm31[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm30 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm30 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm26 = mem[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm19 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm11 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm16 = mem[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm27[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm24[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm21[2,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm23, %zmm22
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm23
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm22, %zmm27, %zmm23
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %ymm23, %ymm17, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm17, %ymm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm1[0,1,2,3],zmm2[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm2 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm1[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm3 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm8 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm10, %zmm22, %zmm8
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm17, %zmm8
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm2 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm10 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm22, %zmm10
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm3, %zmm17, %zmm10
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm18
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm18
-; AVX512DQ-SLOW-NEXT:    vextracti64x4 $1, %zmm23, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm1, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm3, %ymm29
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm21 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm1[0,1,2,3],zmm2[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm24 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm2
-; AVX512DQ-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm2[4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm23
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm29 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm19, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm16, %zmm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm2
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm20, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm14, %zmm4
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm4
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm0, %zmm19
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm4, %zmm0, %zmm20
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm0, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm26, %zmm30, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm27, %zmm2
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm0 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm3 = mem[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm3 = mem[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm4 = mem[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm3, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm4 = mem[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm5 = mem[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm26 = mem[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm7 = mem[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm9 = mem[0,2,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm11 = mem[0,1,3,2,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm12 = mem[2,1,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm13 = mem[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm14 = mem[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm15 = mem[2,1,2,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
-; AVX512DQ-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # ymm16 = mem[0,0,1,1]
-; AVX512DQ-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
-; AVX512DQ-SLOW-NEXT:    # xmm1 = mem[0,2,3,3,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm11[0,1,1,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm9, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm12, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm9
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm11
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm9, %zmm0, %zmm25
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm26, %zmm3
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm3
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm14, %zmm0
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm16, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
-; AVX512DQ-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm4
-; AVX512DQ-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm0, %zmm28
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm24
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm24, 320(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm28, 256(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm25, 192(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm10, 64(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm17, 448(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm4, 704(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm11, 640(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm19, 576(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm8, 512(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm0, 384(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm21, 768(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm23, 832(%rax)
-; AVX512DQ-SLOW-NEXT:    addq $2200, %rsp # imm = 0x898
-; AVX512DQ-SLOW-NEXT:    vzeroupper
-; AVX512DQ-SLOW-NEXT:    retq
+; AVX512F-SLOW-LABEL: store_i16_stride7_vf64:
+; AVX512F-SLOW:       # %bb.0:
+; AVX512F-SLOW-NEXT:    subq $2200, %rsp # imm = 0x898
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rcx), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rdx), %ymm9
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rdi), %ymm7
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rsi), %ymm8
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm0 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm19
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm11, %ymm9, %ymm2
+; AVX512F-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm16
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm8, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm7, %ymm2
+; AVX512F-SLOW-NEXT:    vporq %ymm1, %ymm2, %ymm17
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%r9), %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm3, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm3, %ymm10
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm20
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%r8), %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm15 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
+; AVX512F-SLOW-NEXT:    vpshufb %ymm15, %ymm3, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm3, %ymm24
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rcx), %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rdx), %ymm6
+; AVX512F-SLOW-NEXT:    vpshufb %ymm11, %ymm6, %ymm2
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rsi), %ymm5
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rdi), %ymm4
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm4, %ymm2
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm10, %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm15, %ymm2, %ymm2
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm1, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm11, %ymm2, %ymm2
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm23
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %ymm10
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm10, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm10, %ymm25
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm2, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb %ymm0, %ymm2, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdx), %ymm1
+; AVX512F-SLOW-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
+; AVX512F-SLOW-NEXT:    vpor %ymm0, %ymm11, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rsi), %ymm0
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm0, %ymm12
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdi), %ymm11
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm11, %ymm13
+; AVX512F-SLOW-NEXT:    vpor %ymm12, %ymm13, %ymm10
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r8), %ymm13
+; AVX512F-SLOW-NEXT:    vpshufb %ymm15, %ymm13, %ymm12
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r9), %ymm15
+; AVX512F-SLOW-NEXT:    vpshufb %ymm3, %ymm15, %ymm14
+; AVX512F-SLOW-NEXT:    vpor %ymm14, %ymm12, %ymm10
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm15, %ymm12
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm13[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm12 = ymm14[0,1],ymm12[2],ymm14[3,4],ymm12[5],ymm14[6,7,8,9],ymm12[10],ymm14[11,12],ymm12[13],ymm14[14,15]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm15[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm14[0,1],ymm10[2],ymm14[3,4],ymm10[5],ymm14[6,7,8,9],ymm10[10],ymm14[11,12],ymm10[13],ymm14[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [2,1,3,2,10,10,10,11]
+; AVX512F-SLOW-NEXT:    vpermi2q %zmm10, %zmm12, %zmm14
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm12 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
+; AVX512F-SLOW-NEXT:    # ymm12 = mem[0,1,0,1]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm14
+; AVX512F-SLOW-NEXT:    vpshufb %ymm12, %ymm14, %ymm10
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm12, %ymm19
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm14[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0],ymm10[1],ymm12[2,3],ymm10[4],ymm12[5,6,7,8],ymm10[9],ymm12[10,11],ymm10[12],ymm12[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm10 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm7[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm14[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm10[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm10[2],ymm9[3,4],ymm10[5],ymm9[6,7,8,9],ymm10[10],ymm9[11,12],ymm10[13],ymm9[14,15]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm7 = ymm7[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm8[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm7 = ymm8[0,1],ymm7[2],ymm8[3,4],ymm7[5],ymm8[6,7,8,9],ymm7[10],ymm8[11,12],ymm7[13],ymm8[14,15]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm16, %zmm8
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm17, %zmm7
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm7
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%r8), %ymm12
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm12, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm12[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm12[16,17,u,u,u,u],zero,zero
+; AVX512F-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm7, %ymm8
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%r9), %ymm14
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm14, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm3, %ymm14, %ymm9
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $248, %ymm10, %ymm8, %ymm9
+; AVX512F-SLOW-NEXT:    vextracti64x4 $1, %zmm7, %ymm7
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm12[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %ymm7, %ymm10, %ymm8
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm14, %ymm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,2]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %ymm8, %ymm10, %ymm7
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm10, %zmm16
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm9[0,1,2,3],zmm7[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm8 = [6,5,0,0,7,6,0,7,6,5,0,0,7,6,0,7]
+; AVX512F-SLOW-NEXT:    # zmm8 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rax), %ymm7
+; AVX512F-SLOW-NEXT:    vpermd %zmm7, %zmm8, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm7[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
+; AVX512F-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpandnq %ymm9, %ymm17, %ymm9
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
+; AVX512F-SLOW-NEXT:    vpbroadcastd 72(%rax), %ymm7
+; AVX512F-SLOW-NEXT:    vpandn %ymm7, %ymm14, %ymm9
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rax), %ymm7
+; AVX512F-SLOW-NEXT:    vpshufb %ymm10, %ymm7, %ymm12
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpbroadcastd 8(%rax), %ymm9
+; AVX512F-SLOW-NEXT:    vpandn %ymm9, %ymm14, %ymm9
+; AVX512F-SLOW-NEXT:    vmovdqa (%rax), %ymm14
+; AVX512F-SLOW-NEXT:    vpshufb %ymm10, %ymm14, %ymm12
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm14, %ymm22
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm12[0,1],ymm9[2],ymm12[3,4],ymm9[5],ymm12[6,7,8,9],ymm9[10],ymm12[11,12],ymm9[13],ymm12[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm0[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm11[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm9 = ymm9[0,1],ymm12[2],ymm9[3,4],ymm12[5],ymm9[6,7,8,9],ymm12[10],ymm9[11,12],ymm12[13],ymm9[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm9, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rax), %ymm9
+; AVX512F-SLOW-NEXT:    vpshufb %ymm10, %ymm9, %ymm10
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm9[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpandnq %ymm12, %ymm17, %ymm12
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm10, %zmm10
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm13[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm12 = ymm15[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm12[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm12[0,1,2],ymm10[3],ymm12[4,5],ymm10[6],ymm12[7,8,9,10],ymm10[11],ymm12[12,13],ymm10[14],ymm12[15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm13
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm10
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm12 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm10 = ymm10[0,1],ymm12[2],ymm10[3,4],ymm12[5],ymm10[6,7,8,9],ymm12[10],ymm10[11,12],ymm12[13],ymm10[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm2[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm31
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm0[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm11[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm11[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm0[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1,2],ymm0[3],ymm1[4,5],ymm0[6],ymm1[7,8,9,10],ymm0[11],ymm1[12,13],ymm0[14],ymm1[15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpermd %zmm9, %zmm8, %zmm0
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm5[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm5[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm2
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm2, %ymm0
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm6[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm1[2],ymm0[3,4],ymm1[5],ymm0[6,7,8,9],ymm1[10],ymm0[11,12],ymm1[13],ymm0[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm8
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm20, %ymm0
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm24[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm24[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,2,3,3,10,9,11,10]
+; AVX512F-SLOW-NEXT:    vpermt2q %zmm0, %zmm10, %zmm1
+; AVX512F-SLOW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [0,5,4,0,0,6,5,0,0,5,4,0,0,6,5,0]
+; AVX512F-SLOW-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpermd 64(%rax), %zmm11, %zmm0
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm17, %zmm0
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm6[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm3[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0],ymm1[1],ymm0[2,3],ymm1[4],ymm0[5,6,7,8],ymm1[9],ymm0[10,11],ymm1[12],ymm0[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm4[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm5[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm1[3],ymm0[4,5],ymm1[6],ymm0[7,8,9,10],ymm1[11],ymm0[12,13],ymm1[14],ymm0[15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm24[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm8[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm1[0,1],ymm0[2],ymm1[3,4],ymm0[5],ymm1[6,7,8,9],ymm0[10],ymm1[11,12],ymm0[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [4,5,4,5,4,5,6,7,16,17,16,17,16,17,17,19]
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%r9), %xmm0
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%r8), %xmm1
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm1[0],xmm0[0],xmm1[1],xmm0[1],xmm1[2],xmm0[2],xmm1[3],xmm0[3]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm3[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm4, %zmm12, %zmm2
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm7[2,3,3,3,6,7,7,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpbroadcastd 96(%rax), %ymm5
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm5, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm5, %zmm15
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rsi), %xmm2
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rdi), %xmm4
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm2, %xmm5
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0,1],xmm5[2],xmm6[3,4],xmm5[5],xmm6[6,7]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm18
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm4[4],xmm2[5],xmm4[5],xmm2[6],xmm4[6],xmm2[7],xmm4[7]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rcx), %xmm2
+; AVX512F-SLOW-NEXT:    vmovdqa 96(%rdx), %xmm4
+; AVX512F-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm6 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm6, %xmm2, %xmm5
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm6, %xmm7
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm6 = xmm4[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm1[4],xmm0[4],xmm1[5],xmm0[5],xmm1[6],xmm0[6],xmm1[7],xmm0[7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm3[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <16,18,19,19,19,19,u,u,0,1,0,1,2,3,2,3>
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm2, %zmm28, %zmm1
+; AVX512F-SLOW-NEXT:    vpbroadcastd 100(%rax), %ymm2
+; AVX512F-SLOW-NEXT:    vpbroadcastd 104(%rax), %ymm3
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm16, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rcx), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rdx), %xmm2
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm3, %xmm30
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm3 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm7, %xmm8
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm2[0],xmm1[1],xmm2[2,3],xmm1[4],xmm2[5,6],xmm1[7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <16,16,17,17,17,17,u,u,0,1,0,1,2,3,2,3>
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm2, %zmm3, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rdi), %xmm2
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%rsi), %xmm4
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm5 = xmm2[0],xmm4[0],xmm2[1],xmm4[1],xmm2[2],xmm4[2],xmm2[3],xmm4[3]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm5, %xmm5
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm6 = xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm6, %xmm26
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm4, %xmm4
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm2[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm2[0,1],xmm4[2],xmm2[3,4],xmm4[5],xmm2[6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm2, %zmm5, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%r9), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa 64(%r8), %xmm4
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm4[4],xmm1[4],xmm4[5],xmm1[5],xmm4[6],xmm1[6],xmm4[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm5, %xmm16
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm1[0],xmm4[1],xmm1[1],xmm4[2],xmm1[2],xmm4[3],xmm1[3]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm4 = xmm1[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm6 = <0,1,0,1,0,1,1,3,16,18,19,19,19,19,u,u>
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm4, %zmm6, %zmm1
+; AVX512F-SLOW-NEXT:    vpbroadcastd 64(%rax), %ymm4
+; AVX512F-SLOW-NEXT:    vpbroadcastd 68(%rax), %ymm5
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm5[4],xmm1[4],xmm5[5],xmm1[5],xmm5[6],xmm1[6],xmm5[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm4, %xmm19
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm5[0],xmm1[0],xmm5[1],xmm1[1],xmm5[2],xmm1[2],xmm5[3],xmm1[3]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm8, %xmm1, %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm8, %xmm29
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm1 = xmm5[0],xmm1[1],xmm5[2,3],xmm1[4],xmm5[5,6],xmm1[7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm7[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm5, %zmm3, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %xmm5
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm7 = xmm3[0],xmm5[0],xmm3[1],xmm5[1],xmm3[2],xmm5[2],xmm3[3],xmm5[3]
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm7, %xmm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm14 = xmm5[4],xmm3[4],xmm5[5],xmm3[5],xmm5[6],xmm3[6],xmm5[7],xmm3[7]
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm5, %xmm5
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm3[0,1],xmm5[2],xmm3[3,4],xmm5[5],xmm3[6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm3
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm1, %zmm2, %zmm3
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %xmm2
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm2[0],xmm1[0],xmm2[1],xmm1[1],xmm2[2],xmm1[2],xmm2[3],xmm1[3]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm2 = xmm1[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm1 = xmm1[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm2, %zmm6, %zmm1
+; AVX512F-SLOW-NEXT:    vpbroadcastd (%rax), %ymm2
+; AVX512F-SLOW-NEXT:    vpbroadcastd 4(%rax), %ymm3
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm9, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm23, %ymm7
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm7[1,2,3,3,4,5,6,7,9,10,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[1,1,1,1,5,5,5,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm20
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm1 = ymm7[0,1,2,3,5,4,6,7,8,9,10,11,13,12,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm25[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm27
+; AVX512F-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
+; AVX512F-SLOW-NEXT:    vprold $16, %ymm3, %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm5[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm2 = ymm3[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm5[0,0,2,1,4,4,6,5]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm6[3],ymm2[4,5],ymm6[6],ymm2[7,8,9,10],ymm6[11],ymm2[12,13],ymm6[14],ymm2[15]
+; AVX512F-SLOW-NEXT:    vpermt2q %zmm1, %zmm10, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm1 = ymm8[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm1[0,0,0,0,4,4,4,4]
+; AVX512F-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm10 # 32-byte Reload
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[0,1,1,3,4,5,5,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm6[0,1],ymm1[2],ymm6[3,4],ymm1[5],ymm6[6,7,8,9],ymm1[10],ymm6[11,12],ymm1[13],ymm6[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm24
+; AVX512F-SLOW-NEXT:    vpshufb %ymm13, %ymm8, %ymm1
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm6 = ymm10[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm6[2],ymm1[3,4],ymm6[5],ymm1[6,7,8,9],ymm6[10],ymm1[11,12],ymm6[13],ymm1[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm21
+; AVX512F-SLOW-NEXT:    vpermd (%rax), %zmm11, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm17, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm10[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm8[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm2[1],ymm1[2,3],ymm2[4],ymm1[5,6,7,8],ymm2[9],ymm1[10,11],ymm2[12],ymm1[13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm25[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm7[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,2,6,6,6,6]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1,2],ymm2[3],ymm1[4,5],ymm2[6],ymm1[7,8,9,10],ymm2[11],ymm1[12,13],ymm2[14],ymm1[15]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm1 = ymm5[3,3,3,3,7,7,7,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm3[0,1,2,3,5,6,7,7,8,9,10,11,13,14,15,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm2 = ymm2[2,2,2,3,6,6,6,7]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r9), %xmm7
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%r8), %xmm2
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm2[0],xmm7[0],xmm2[1],xmm7[1],xmm2[2],xmm7[2],xmm2[3],xmm7[3]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm11 = xmm6[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm11, %zmm12, %zmm1
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm11 = ymm22[2,3,3,3,6,7,7,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpbroadcastd 32(%rax), %ymm12
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm12, %zmm11, %zmm25
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm15, %zmm25
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm1
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm12
+; AVX512F-SLOW-NEXT:    vprold $16, %xmm12, %xmm15
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm13 = xmm1[1,1,2,3]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm3 = xmm13[0,1],xmm15[2],xmm13[3,4],xmm15[5],xmm13[6,7]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm3, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm1[0],xmm12[0],xmm1[1],xmm12[1],xmm1[2],xmm12[2],xmm1[3],xmm12[3]
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm1 = xmm12[4],xmm1[4],xmm12[5],xmm1[5],xmm12[6],xmm1[6],xmm12[7],xmm1[7]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm7[4],xmm2[5],xmm7[5],xmm2[6],xmm7[6],xmm2[7],xmm7[7]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm16, %xmm1
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm1, %xmm1
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm3
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm18, %xmm4
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm4
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm13, %xmm4
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm2, %xmm2
+; AVX512F-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm23 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm22 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm22 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm18 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm18 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm17 = mem[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm6[0,1,2,3,4,5,7,6]
+; AVX512F-SLOW-NEXT:    vpermt2d %zmm0, %zmm28, %zmm2
+; AVX512F-SLOW-NEXT:    vpbroadcastd 36(%rax), %ymm0
+; AVX512F-SLOW-NEXT:    vpbroadcastd 40(%rax), %ymm6
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm28
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm28
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm6
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm29, %xmm0
+; AVX512F-SLOW-NEXT:    vpshufb %xmm0, %xmm6, %xmm2
+; AVX512F-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm0
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm0[1,1,2,2]
+; AVX512F-SLOW-NEXT:    vpblendw {{.*#+}} xmm2 = xmm7[0],xmm2[1],xmm7[2,3],xmm2[4],xmm7[5,6],xmm2[7]
+; AVX512F-SLOW-NEXT:    vmovdqu %ymm2, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
+; AVX512F-SLOW-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-SLOW-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm0 = ymm4[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm0[3,3,3,3]
+; AVX512F-SLOW-NEXT:    vpshufd $233, {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm2 = mem[1,2,2,3,5,6,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm10 = ymm4[2,1,2,3,6,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm10 = ymm10[0,0,3,3,4,5,6,7,8,8,11,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm10[2,2,2,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm30, %xmm4
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm10 = xmm4[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm26, %xmm4
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm8 = xmm4[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm8 = xmm8[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %xmm19, %xmm4
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm15 = xmm4[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm12 = xmm14[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm12 = xmm12[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm4 = mem[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm5 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $250, {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm6 = mem[2,2,3,3]
+; AVX512F-SLOW-NEXT:    vpermpd $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vmovups %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm31 = ymm31[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm30 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm30 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm26 = mem[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm19 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm19 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm11 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm11 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq $182, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm16 = mem[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq $234, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm7 = mem[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm27[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm24[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm21[2,2,2,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm23, %zmm22
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm23
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm22, %zmm27, %zmm23
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,1,3,2]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %ymm23, %ymm17, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm17, %ymm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm2 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm1[0,1,2,3],zmm2[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm2 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm1 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm3 = zmm3[0,1,2,3],zmm1[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm17, %zmm3 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm8 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm10, %zmm22, %zmm8
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm17, %zmm8
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm15, %zmm2 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm12, %zmm10 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm2, %zmm22, %zmm10
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm3, %zmm17, %zmm10
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm1, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm1, %zmm18
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm18
+; AVX512F-SLOW-NEXT:    vextracti64x4 $1, %zmm23, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm1, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %ymm2, %ymm3, %ymm29
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm21 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm21 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm0, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm1[0,1,2,3],zmm2[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpternlogq $234, {{[-0-9]+}}(%r{{[sb]}}p), %zmm3, %zmm24 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm29, %zmm0, %zmm2
+; AVX512F-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm0[0,1,2,3],zmm2[4,5,6,7]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm23
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm29 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm29 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm6 # 64-byte Folded Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm19, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm16, %zmm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm2
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm20, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm14, %zmm4
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm3, %zmm4
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm2, %zmm0, %zmm19
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm4, %zmm0, %zmm20
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm31, %zmm0, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm26, %zmm30, %zmm2
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm0, %zmm27, %zmm2
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm0 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm3 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm3 = mem[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm3[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[0,1,1,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
+; AVX512F-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm3 = mem[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm4 = mem[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm3, %zmm3
+; AVX512F-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm4 = mem[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm5 = mem[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm5 = xmm5[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm26 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm26 = mem[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm7 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm7 = mem[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpermq $232, {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm9 = mem[0,2,2,3]
+; AVX512F-SLOW-NEXT:    vpshuflw $180, {{[-0-9]+}}(%r{{[sb]}}p), %xmm11 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm11 = mem[0,1,3,2,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm11 = xmm11[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq $246, {{[-0-9]+}}(%r{{[sb]}}p), %ymm12 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm12 = mem[2,1,3,3]
+; AVX512F-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm13 = mem[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpermq $96, {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm14 = mem[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpshuflw $230, {{[-0-9]+}}(%r{{[sb]}}p), %xmm15 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm15 = mem[2,1,2,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm15 = xmm15[0,1,2,3,4,5,5,4]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[0,0,1,3]
+; AVX512F-SLOW-NEXT:    vpermq $80, {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # ymm16 = mem[0,0,1,1]
+; AVX512F-SLOW-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Folded Reload
+; AVX512F-SLOW-NEXT:    # xmm1 = mem[0,2,3,3,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,2,1]
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm3
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm11[0,1,1,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm9, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm13, %zmm12, %zmm9
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm27, %zmm9
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm11
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm9, %zmm0, %zmm25
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm4, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm7, %zmm26, %zmm3
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm3
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm14, %zmm0
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm16, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $226, %zmm0, %zmm22, %zmm1
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
+; AVX512F-SLOW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm3, %zmm0, %zmm4
+; AVX512F-SLOW-NEXT:    vpternlogq $184, %zmm1, %zmm0, %zmm28
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm24
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm29, %zmm0
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm24, 320(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm28, 256(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm25, 192(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm10, 64(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm17, 448(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm4, 704(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm11, 640(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm19, 576(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm8, 512(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm0, 384(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm21, 768(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm23, 832(%rax)
+; AVX512F-SLOW-NEXT:    addq $2200, %rsp # imm = 0x898
+; AVX512F-SLOW-NEXT:    vzeroupper
+; AVX512F-SLOW-NEXT:    retq
 ;
-; AVX512DQ-FAST-LABEL: store_i16_stride7_vf64:
-; AVX512DQ-FAST:       # %bb.0:
-; AVX512DQ-FAST-NEXT:    subq $1496, %rsp # imm = 0x5D8
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rcx), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm15 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm0, %ymm20
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rdx), %ymm9
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm0, %ymm10
-; AVX512DQ-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm16
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rsi), %ymm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm6, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rdi), %ymm7
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm7, %ymm3
-; AVX512DQ-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm17
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%r9), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%r8), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm4
-; AVX512DQ-FAST-NEXT:    vporq %ymm2, %ymm4, %ymm18
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, (%rsp) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm8
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rdx), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm12
-; AVX512DQ-FAST-NEXT:    vpor %ymm8, %ymm12, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rsi), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rdi), %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm28
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vporq %ymm0, %ymm13, %ymm19
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm10, %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm10, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %ymm10
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm10, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %ymm15
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm13
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %ymm8
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm8, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %ymm4
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm4, %ymm11
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm11, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %ymm13
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %ymm14
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm14, %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm3, %ymm12
-; AVX512DQ-FAST-NEXT:    vporq %ymm5, %ymm0, %ymm21
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm20, %ymm2
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm0 = ymm2[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm11 = ymm9[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm11[0,1],ymm3[2],ymm11[3,4],ymm3[5],ymm11[6,7,8,9],ymm3[10],ymm11[11,12],ymm3[13],ymm11[14,15]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm16, %zmm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm17, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%r8), %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[16,17,u,u,u,u],zero,zero
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %ymm25, %ymm3, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%r9), %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %ymm12, %ymm0, %ymm1
-; AVX512DQ-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,4,u,u,u,5,u,u>
-; AVX512DQ-FAST-NEXT:    vpermd %ymm11, %ymm3, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %ymm0, %ymm17, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm17, %zmm22
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm5, %ymm0
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm5, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [0,1,4,5,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rax), %ymm0
-; AVX512DQ-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm1
-; AVX512DQ-FAST-NEXT:    vpandnq %ymm1, %ymm25, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm20
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm3, %zmm1
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm6, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm3, %ymm26
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5],ymm1[6],ymm3[7,8,9,10],ymm1[11],ymm3[12,13],ymm1[14],ymm3[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm29
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3],ymm3[4],ymm6[5,6,7,8],ymm3[9],ymm6[10,11],ymm3[12],ymm6[13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm6 = [0,2,2,3,10,9,11,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm6, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm7, %ymm27
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm9[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm7[0],ymm1[1],ymm7[2,3],ymm1[4],ymm7[5,6,7,8],ymm1[9],ymm7[10,11],ymm1[12],ymm7[13,14,15]
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
-; AVX512DQ-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm2, %ymm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm16
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm9[2],ymm7[3,4],ymm9[5],ymm7[6,7,8,9],ymm9[10],ymm7[11,12],ymm9[13],ymm7[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [2,2,2,3,8,10,10,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm17, %zmm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm1, %zmm7
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm1, %zmm24
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <5,u,u,u,6,u,u,6>
-; AVX512DQ-FAST-NEXT:    vpermd %ymm11, %ymm1, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 64(%rax), %zmm12
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm3 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
-; AVX512DQ-FAST-NEXT:    # zmm3 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermi2d %zmm12, %zmm0, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 72(%rax), %ymm0
-; AVX512DQ-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rax), %ymm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm20, %ymm5
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm7, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm7, %ymm30
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm11
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%r9), %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%r8), %xmm2
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm2, %xmm20
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [0,0,1,1,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm11
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm0
-; AVX512DQ-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rax), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm3
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm5
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm5
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm7 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm14, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7,8,9,10],ymm3[11],ymm0[12,13],ymm3[14],ymm0[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <0,1,u,3,10,10,11,11>
-; AVX512DQ-FAST-NEXT:    vpermi2q %zmm0, %zmm21, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm26, %ymm9
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm9, %ymm8, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5],ymm0[6],ymm3[7,8,9,10],ymm0[11],ymm3[12,13],ymm0[14],ymm3[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm29, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm4[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6,7,8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm6, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm27, %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm10, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm15[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2,3],ymm0[4],ymm5[5,6,7,8],ymm0[9],ymm5[10,11],ymm0[12],ymm5[13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm16, %ymm4
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm4, %ymm10, %ymm5
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm15[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm24, %zmm16
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm24, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512DQ-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm14, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7,8,9],ymm3[10],ymm0[11,12],ymm3[13],ymm0[14,15]
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm14, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm13[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7,8,9],ymm3[10],ymm5[11,12],ymm3[13],ymm5[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [2,1,3,2,10,10,10,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 (%rax), %zmm19
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm2 = [14,21,0,0,15,22,0,15,14,21,0,0,15,22,0,15]
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rax), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vpermi2d %zmm0, %zmm19, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm28[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm28[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm5[2],ymm3[3,4],ymm5[5],ymm3[6,7,8,9],ymm5[10],ymm3[11,12],ymm5[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm4, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm2[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7,8,9],ymm5[10],ymm0[11,12],ymm5[13],ymm0[14,15]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm1[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm2, %ymm4
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm5
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm5
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm8, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm2[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm8, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm2, %ymm14
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm6[3],ymm3[4,5],ymm6[6],ymm3[7,8,9,10],ymm6[11],ymm3[12,13],ymm6[14],ymm3[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm2, %zmm3
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm30, %zmm12, %zmm0
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm2 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm25, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rcx), %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rdx), %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm2, %xmm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,2,2,3,8,9,9,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm31, %zmm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm9, %ymm13, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm28[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8,9,10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rsi), %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%rdi), %xmm3
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm4, %xmm26
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm3, %xmm24
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [2,1,3,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm2, %zmm3, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm16, %zmm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm15, %ymm8, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm14[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rcx), %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rdx), %xmm1
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm4 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm4, %xmm2, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, %xmm8
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm6 = xmm1[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm7, %xmm14
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [0,1,1,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm5, %zmm1, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm1, %zmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, (%rsp) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 64(%rsi), %xmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm6, %xmm1
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm4[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3,4],xmm1[5],xmm5[6,7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm9, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,1,1,8,8,10,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm5, %zmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm20, %xmm2
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm10, %xmm2, %xmm6
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [0,0,0,1,8,9,9,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm6, %zmm16, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %xmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm8, %xmm4, %xmm6
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm9[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm8[0],xmm6[1],xmm8[2,3],xmm6[4],xmm8[5,6],xmm6[7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm14, %xmm8, %xmm8
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm14, %xmm21
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm6, %zmm11, %zmm8
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %xmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm11, %xmm6
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm4[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm9[0,1],xmm6[2],xmm9[3,4],xmm6[5],xmm9[6,7]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm11[0],xmm4[1],xmm11[1],xmm4[2],xmm11[2],xmm4[3],xmm11[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm3, %xmm28
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm6, %zmm27, %zmm9
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm8, %zmm5, %zmm9
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 64(%rax), %ymm5
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 68(%rax), %ymm6
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm23
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm23
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm2, %zmm23
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm10, %xmm22
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm7, %xmm3
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm5, %zmm16, %zmm1
-; AVX512DQ-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm6
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm20
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm4, %zmm20
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm9, %zmm2, %zmm20
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,10,10,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm5[2],ymm1[3,4],ymm5[5],ymm1[6,7,8,9],ymm5[10],ymm1[11,12],ymm5[13],ymm1[14,15]
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm9[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm7, %ymm15
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm8 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm18, %zmm8
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm8
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vprold $16, %ymm5, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm16[1,2,2,3,5,6,6,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm16[0,0,2,1,4,4,6,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5],ymm5[6],ymm2[7,8,9,10],ymm5[11],ymm2[12,13],ymm5[14],ymm2[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,3,3,10,9,11,10]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,8,8,9]
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%r9), %xmm11
-; AVX512DQ-FAST-NEXT:    vmovdqa 96(%r8), %xmm7
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm11[0],xmm7[1],xmm11[1],xmm7[2],xmm11[2],xmm7[3],xmm11[3]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm1
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm13
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm19, %zmm1
-; AVX512DQ-FAST-NEXT:    vbroadcasti32x8 {{.*#+}} zmm5 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
-; AVX512DQ-FAST-NEXT:    # zmm5 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %zmm1, %zmm5, %zmm19
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm25, %zmm19
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm19
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %xmm12
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %xmm8
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm12[0],xmm8[1],xmm12[1],xmm8[2],xmm12[2],xmm8[3],xmm12[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm9[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm15[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm9[0],ymm2[1],ymm9[2,3],ymm2[4],ymm9[5,6,7,8],ymm2[9],ymm9[10,11],ymm2[12],ymm9[13,14,15]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm1, %zmm31, %zmm15
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm31 = [6,7,3,3,7,7,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm30, %ymm31, %ymm1
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 96(%rax), %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm21
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm13, %zmm30, %zmm21
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm24, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm26, %xmm1
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm26, %xmm1
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm2 = xmm24[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm2[0,1],xmm1[2],xmm2[3,4],xmm1[5],xmm2[6,7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %xmm4
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm28, %xmm6
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm6, %xmm13, %xmm13
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,1,3,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm13, %zmm2, %zmm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,2,1,8,8,9,11]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm24 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm24, %xmm13
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm9[0],xmm0[1],xmm9[2,3],xmm0[4],xmm9[5,6],xmm0[7]
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm2, %zmm27, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm25, %zmm0
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm30, %ymm7
-; AVX512DQ-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm6, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm22, %xmm13
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm13, %xmm10, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,1,1,3,8,8,9,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm2, %zmm28, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm6[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm26 = ymm2[3,3,3,3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm29 = ymm2[2,2,2,2]
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 100(%rax), %ymm2
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 104(%rax), %ymm6
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm2, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm16[3,3,3,3,7,7,7,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm10 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm15[0],xmm3[1],xmm15[1],xmm3[2],xmm15[2],xmm3[3],xmm15[3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm9 = xmm6[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [2,2,2,3,8,8,8,9]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm9, %zmm16, %zmm10
-; AVX512DQ-FAST-NEXT:    vpermd %ymm14, %ymm31, %ymm9
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm18
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm9, %zmm9
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm10, %zmm30, %zmm9
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm2
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
-; AVX512DQ-FAST-NEXT:    vprold $16, %xmm4, %xmm4
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2],xmm5[3,4],xmm4[5],xmm5[6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm10, %xmm0, %xmm0
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm24, %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm0, %xmm12, %xmm0
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm12[4],xmm8[5],xmm12[5],xmm8[6],xmm12[6],xmm8[7],xmm12[7]
-; AVX512DQ-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm8[0],xmm0[1],xmm8[2,3],xmm0[4],xmm8[5,6],xmm0[7]
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm0
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm15[4],xmm3[5],xmm15[5],xmm3[6],xmm15[6],xmm3[7],xmm15[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm13, %xmm6, %xmm4
-; AVX512DQ-FAST-NEXT:    vpermt2q %zmm3, %zmm28, %zmm4
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm3
-; AVX512DQ-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm5
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm5, %zmm3, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm4, %zmm22, %zmm3
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %ymm7, %ymm0, %ymm29
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm0, %ymm11
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %ymm11, %ymm0, %ymm26
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm26, %zmm0, %zmm1
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm29[0,1,2,3],zmm1[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm12
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpckhwd (%rsp), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm10, %xmm5, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm6 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
-; AVX512DQ-FAST-NEXT:    vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # ymm7 = mem[0,1,1,3,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
-; AVX512DQ-FAST-NEXT:    vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # ymm8 = mem[1,1,1,1,5,5,5,5]
-; AVX512DQ-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpermd %ymm10, %ymm8, %ymm8
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[16,17],zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
-; AVX512DQ-FAST-NEXT:    vpandn %ymm8, %ymm11, %ymm8
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm10, %zmm8
-; AVX512DQ-FAST-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm10 = mem[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,2,3,3,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm13
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm10, %zmm25, %zmm1
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm4 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm5 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm1 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm5 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm4, %zmm4
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm6, %zmm6
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm6
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm8
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm8, 320(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm3, 256(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm9, 192(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm19, 128(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm20, (%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm23, 448(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm2, 704(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm21, 640(%rax)
-; AVX512DQ-FAST-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovaps %zmm0, 576(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm13, 384(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm5, 64(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm1, 512(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm12, 832(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm4, 768(%rax)
-; AVX512DQ-FAST-NEXT:    addq $1496, %rsp # imm = 0x5D8
-; AVX512DQ-FAST-NEXT:    vzeroupper
-; AVX512DQ-FAST-NEXT:    retq
+; AVX512F-FAST-LABEL: store_i16_stride7_vf64:
+; AVX512F-FAST:       # %bb.0:
+; AVX512F-FAST-NEXT:    subq $1496, %rsp # imm = 0x5D8
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rcx), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm15 = [128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm0, %ymm20
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rdx), %ymm9
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm0 = <u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm9, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa %ymm0, %ymm10
+; AVX512F-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm16
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rsi), %ymm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm14 = [128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128,128,128,128,128,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm6, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rdi), %ymm7
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm11 = <12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u,u,u,u,u,16,17,18,19>
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm7, %ymm3
+; AVX512F-FAST-NEXT:    vporq %ymm2, %ymm3, %ymm17
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = [128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128,128,128]
+; AVX512F-FAST-NEXT:    vmovdqa 64(%r9), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa 64(%r8), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <u,u,u,u,u,u,u,u,12,13,14,15,128,128,u,u,u,u,u,u,u,u,u,u,16,17,128,128,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm4
+; AVX512F-FAST-NEXT:    vporq %ymm2, %ymm4, %ymm18
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rcx), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, (%rsp) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm8
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rdx), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm0, %ymm12
+; AVX512F-FAST-NEXT:    vpor %ymm8, %ymm12, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rsi), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rdi), %ymm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm28
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vporq %ymm0, %ymm13, %ymm19
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa %ymm10, %ymm2
+; AVX512F-FAST-NEXT:    vpshufb %ymm10, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %ymm10
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm10, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %ymm15
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm15, %ymm13
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %ymm8
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm8, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %ymm4
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm4, %ymm11
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm11, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %ymm13
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %ymm14
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm14, %ymm5
+; AVX512F-FAST-NEXT:    vmovdqa %ymm3, %ymm12
+; AVX512F-FAST-NEXT:    vporq %ymm5, %ymm0, %ymm21
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm20, %ymm2
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm0 = ymm2[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm11 = ymm9[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm11[0,1],ymm0[2],ymm11[3,4],ymm0[5],ymm11[6,7,8,9],ymm0[10],ymm11[11,12],ymm0[13],ymm11[14,15]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,1,3,2]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm11[0,1],ymm3[2],ymm11[3,4],ymm3[5],ymm11[6,7,8,9],ymm3[10],ymm11[11,12],ymm3[13],ymm11[14,15]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,2,3]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm16, %zmm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm17, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vmovdqa 96(%r8), %ymm11
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[14,15,u,u,u,u],zero,zero,zero,zero,zero,zero,zero,zero,ymm11[16,17,u,u,u,u],zero,zero
+; AVX512F-FAST-NEXT:    vpternlogq $248, %ymm25, %ymm3, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 96(%r9), %ymm5
+; AVX512F-FAST-NEXT:    vmovdqu %ymm5, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm12, %ymm5, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm12 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $248, %ymm12, %ymm0, %ymm1
+; AVX512F-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,4,u,u,u,5,u,u>
+; AVX512F-FAST-NEXT:    vpermd %ymm11, %ymm3, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %ymm0, %ymm17, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm17, %zmm22
+; AVX512F-FAST-NEXT:    vprold $16, %ymm5, %ymm0
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,2,2,2]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm5, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm1[0,1,2,3],zmm0[4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [0,1,4,5,4,5,5,7]
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rax), %ymm0
+; AVX512F-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm1
+; AVX512F-FAST-NEXT:    vpandnq %ymm1, %ymm25, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [12,13,128,128,128,128,128,128,128,128,128,128,128,128,14,15,128,128,128,128,128,128,128,128,128,128,128,128,16,17,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm20
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm3, %zmm1
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm3 = <u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29>
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm6, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm3, %ymm26
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm7[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm3[0,1,2],ymm1[3],ymm3[4,5],ymm1[6],ymm3[7,8,9,10],ymm1[11],ymm3[12,13],ymm1[14],ymm3[15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = <10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm6, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm29
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm6[0],ymm3[1],ymm6[2,3],ymm3[4],ymm6[5,6,7,8],ymm3[9],ymm6[10,11],ymm3[12],ymm6[13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm6 = [0,2,2,3,10,9,11,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm6, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = <14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm7, %ymm27
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm7 = ymm9[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm7[0],ymm1[1],ymm7[2,3],ymm1[4],ymm7[5,6,7,8],ymm1[9],ymm7[10,11],ymm1[12],ymm7[13,14,15]
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27,22,23,26,27,0,0,24,25,26,27,0,0,26,27,26,27]
+; AVX512F-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm2, %ymm7
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm16
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm9[2],ymm7[3,4],ymm9[5],ymm7[6,7,8,9],ymm9[10],ymm7[11,12],ymm9[13],ymm7[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [2,2,2,3,8,10,10,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm17, %zmm7
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm1, %zmm7
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm1, %zmm24
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = <5,u,u,u,6,u,u,6>
+; AVX512F-FAST-NEXT:    vpermd %ymm11, %ymm1, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 64(%rax), %zmm12
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm3 = [30,5,0,0,31,6,0,31,30,5,0,0,31,6,0,31]
+; AVX512F-FAST-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermi2d %zmm12, %zmm0, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
+; AVX512F-FAST-NEXT:    vpbroadcastd 72(%rax), %ymm0
+; AVX512F-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rax), %ymm7
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm20, %ymm5
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm7, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm7, %ymm30
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm11
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa 64(%r9), %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 64(%r8), %xmm2
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm2, %xmm20
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [0,0,1,1,12,13,14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm11
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vpbroadcastd 8(%rax), %ymm0
+; AVX512F-FAST-NEXT:    vpandnq %ymm0, %ymm23, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rax), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm3
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm5
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa %xmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm2[4],xmm1[4],xmm2[5],xmm1[5],xmm2[6],xmm1[6],xmm2[7],xmm1[7]
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm3
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $248, %zmm23, %zmm3, %zmm5
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vpbroadcastd {{.*#+}} ymm7 = [18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm14, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1,2],ymm3[3],ymm0[4,5],ymm3[6],ymm0[7,8,9,10],ymm3[11],ymm0[12,13],ymm3[14],ymm0[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = <0,1,u,3,10,10,11,11>
+; AVX512F-FAST-NEXT:    vpermi2q %zmm0, %zmm21, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqu %ymm8, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm26, %ymm9
+; AVX512F-FAST-NEXT:    vpshufb %ymm9, %ymm8, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm4[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1,2],ymm0[3],ymm3[4,5],ymm0[6],ymm3[7,8,9,10],ymm0[11],ymm3[12,13],ymm0[14],ymm3[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm29, %ymm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm8, %ymm3
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm4[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0],ymm3[1],ymm5[2,3],ymm3[4],ymm5[5,6,7,8],ymm3[9],ymm5[10,11],ymm3[12],ymm5[13,14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm6, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqu %ymm10, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm27, %ymm11
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm10, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm15, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm15[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm5[0],ymm0[1],ymm5[2,3],ymm0[4],ymm5[5,6,7,8],ymm0[9],ymm5[10,11],ymm0[12],ymm5[13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm16, %ymm4
+; AVX512F-FAST-NEXT:    vpshufb %ymm4, %ymm10, %ymm5
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm15[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm5[0,1],ymm6[2],ymm5[3,4],ymm6[5],ymm5[6,7,8,9],ymm6[10],ymm5[11,12],ymm6[13],ymm5[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm24, %zmm16
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm24, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm15 = [26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
+; AVX512F-FAST-NEXT:    # ymm15 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm14, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm13[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm3[2],ymm0[3,4],ymm3[5],ymm0[6,7,8,9],ymm3[10],ymm0[11,12],ymm3[13],ymm0[14,15]
+; AVX512F-FAST-NEXT:    vprold $16, %ymm14, %ymm3
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm13[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm5[0,1],ymm3[2],ymm5[3,4],ymm3[5],ymm5[6,7,8,9],ymm3[10],ymm5[11,12],ymm3[13],ymm5[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm18 = [2,1,3,2,10,10,10,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 (%rax), %zmm19
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [14,21,0,0,15,22,0,15,14,21,0,0,15,22,0,15]
+; AVX512F-FAST-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rax), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vpermi2d %zmm0, %zmm19, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm13 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm28[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0],ymm0[1],ymm3[2,3],ymm0[4],ymm3[5,6,7,8],ymm0[9],ymm3[10,11],ymm0[12],ymm3[13,14,15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm13[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm28[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1],ymm5[2],ymm3[3,4],ymm5[5],ymm3[6,7,8,9],ymm5[10],ymm3[11,12],ymm5[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm4, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm2[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm0[0,1],ymm5[2],ymm0[3,4],ymm5[5],ymm0[6,7,8,9],ymm5[10],ymm0[11,12],ymm5[13],ymm0[14,15]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm1[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vmovdqa %ymm2, %ymm4
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm5 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm18, %zmm5
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm3, %zmm5
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vprold $16, %ymm8, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm3 = ymm2[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm3[0,1],ymm0[2],ymm3[3,4],ymm0[5],ymm3[6,7,8,9],ymm0[10],ymm3[11,12],ymm0[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm8, %ymm3
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm2[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vmovdqa %ymm2, %ymm14
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm3 = ymm3[0,1,2],ymm6[3],ymm3[4,5],ymm6[6],ymm3[7,8,9,10],ymm6[11],ymm3[12,13],ymm6[14],ymm3[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,2,3,3,10,9,11,10]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm2, %zmm3
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm30, %zmm12, %zmm0
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
+; AVX512F-FAST-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermd %zmm0, %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm25, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm1 = ymm4[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0],ymm0[1],ymm1[2,3],ymm0[4],ymm1[5,6,7,8],ymm0[9],ymm1[10,11],ymm0[12],ymm1[13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rcx), %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rdx), %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm0 = xmm0[0],xmm3[0],xmm0[1],xmm3[1],xmm0[2],xmm3[2],xmm0[3],xmm3[3]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm2 = [0,1,2,3,0,1,2,3,6,7,4,5,6,7,4,5]
+; AVX512F-FAST-NEXT:    vpshufb %xmm2, %xmm0, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm2, %xmm7
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,2,2,3,8,9,9,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm31, %zmm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm9, %ymm13, %ymm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm28[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm0 = ymm2[0,1,2],ymm0[3],ymm2[4,5],ymm0[6],ymm2[7,8,9,10],ymm0[11],ymm2[12,13],ymm0[14],ymm2[15]
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rsi), %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa 96(%rdi), %xmm3
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm2 = xmm3[0],xmm4[0],xmm3[1],xmm4[1],xmm3[2],xmm4[2],xmm3[3],xmm4[3]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm4, %xmm26
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm3, %xmm24
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = [0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm3 = [2,1,3,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm2, %zmm3, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm16, %zmm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm15, %ymm8, %ymm1
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm14[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm13 = ymm1[0,1],ymm2[2],ymm1[3,4],ymm2[5],ymm1[6,7,8,9],ymm2[10],ymm1[11,12],ymm2[13],ymm1[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rcx), %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rdx), %xmm1
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm4 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
+; AVX512F-FAST-NEXT:    vpshufb %xmm4, %xmm2, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, %xmm8
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm6 = xmm1[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm6[0],xmm5[1],xmm6[2,3],xmm5[4],xmm6[5,6],xmm5[7]
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm4 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm1[0],xmm2[0],xmm1[1],xmm2[1],xmm1[2],xmm2[2],xmm1[3],xmm2[3]
+; AVX512F-FAST-NEXT:    vmovdqa %xmm7, %xmm14
+; AVX512F-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [0,1,1,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm5, %zmm1, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm1, %zmm11
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rdi), %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, (%rsp) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 64(%rsi), %xmm6
+; AVX512F-FAST-NEXT:    vmovdqa %xmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vprold $16, %xmm6, %xmm1
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm4[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm5 = xmm5[0,1],xmm1[2],xmm5[3,4],xmm1[5],xmm5[6,7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm4[0],xmm6[0],xmm4[1],xmm6[1],xmm4[2],xmm6[2],xmm4[3],xmm6[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa %xmm9, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm27 = [0,0,1,1,8,8,10,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm1
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm5, %zmm1
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm20, %xmm2
+; AVX512F-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm2, %xmm2 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm2 = xmm2[0],mem[0],xmm2[1],mem[1],xmm2[2],mem[2],xmm2[3],mem[3]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm7 = [0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [0,1,2,3,8,9,10,11,14,15,12,13,14,15,12,13]
+; AVX512F-FAST-NEXT:    vpshufb %xmm10, %xmm2, %xmm6
+; AVX512F-FAST-NEXT:    vpshufb %xmm7, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [0,0,0,1,8,9,9,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm6, %zmm16, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %xmm9
+; AVX512F-FAST-NEXT:    vmovdqa %xmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %xmm8, %xmm4, %xmm6
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm9[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm8[0],xmm6[1],xmm8[2,3],xmm6[4],xmm8[5,6],xmm6[7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm8 = xmm9[0],xmm4[0],xmm9[1],xmm4[1],xmm9[2],xmm4[2],xmm9[3],xmm4[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm14, %xmm8, %xmm8
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm14, %xmm21
+; AVX512F-FAST-NEXT:    vpermt2q %zmm6, %zmm11, %zmm8
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %xmm11
+; AVX512F-FAST-NEXT:    vmovdqa %xmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vprold $16, %xmm11, %xmm6
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm4[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm6 = xmm9[0,1],xmm6[2],xmm9[3,4],xmm6[5],xmm9[6,7]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm9 = xmm4[0],xmm11[0],xmm4[1],xmm11[1],xmm4[2],xmm11[2],xmm4[3],xmm11[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm9
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm3, %xmm28
+; AVX512F-FAST-NEXT:    vpermt2q %zmm6, %zmm27, %zmm9
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm8, %zmm5, %zmm9
+; AVX512F-FAST-NEXT:    vpbroadcastd 64(%rax), %ymm5
+; AVX512F-FAST-NEXT:    vpbroadcastd 68(%rax), %ymm6
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm23
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm23
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm2, %zmm23
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpcklwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm10, %xmm22
+; AVX512F-FAST-NEXT:    vpshufb %xmm7, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vmovdqa %xmm7, %xmm3
+; AVX512F-FAST-NEXT:    vpermt2q %zmm5, %zmm16, %zmm1
+; AVX512F-FAST-NEXT:    vpbroadcastd (%rax), %ymm5
+; AVX512F-FAST-NEXT:    vpbroadcastd 4(%rax), %ymm6
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm20
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm1, %zmm4, %zmm20
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm9, %zmm2, %zmm20
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm4 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[10,11,8,9,u,u,u,u,u,u,u,u,u,u,u,u,26,27,24,25,u,u,u,u,26,27,24,25,u,u,u,u]
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm17 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0],ymm1[1],ymm2[2,3],ymm1[4],ymm2[5,6,7,8],ymm1[9],ymm2[10,11],ymm1[12],ymm2[13,14,15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm17[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1],ymm5[2],ymm2[3,4],ymm5[5],ymm2[6,7,8,9],ymm5[10],ymm2[11,12],ymm5[13],ymm2[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,10,10,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm9 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm9[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,u,u,24,25,26,27,u,u,26,27,26,27]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm7[2,2,2,2,6,6,6,6]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm1[0,1],ymm5[2],ymm1[3,4],ymm5[5],ymm1[6,7,8,9],ymm5[10],ymm1[11,12],ymm5[13],ymm1[14,15]
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm5 = ymm9[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm7[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vmovdqa %ymm7, %ymm15
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm8 = ymm6[0,1],ymm5[2],ymm6[3,4],ymm5[5],ymm6[6,7,8,9],ymm5[10],ymm6[11,12],ymm5[13],ymm6[14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm18, %zmm8
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm8
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm5 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vprold $16, %ymm5, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %ymm16 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm16[1,2,2,3,5,6,6,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1],ymm1[2],ymm2[3,4],ymm1[5],ymm2[6,7,8,9],ymm1[10],ymm2[11,12],ymm1[13],ymm2[14,15]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,18,19,20,21,18,19,20,21]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm5 = ymm16[0,0,2,1,4,4,6,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm2 = ymm2[0,1,2],ymm5[3],ymm2[4,5],ymm5[6],ymm2[7,8,9,10],ymm5[11],ymm2[12,13],ymm5[14],ymm2[15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,3,3,10,9,11,10]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm5 = [2,2,2,3,8,8,8,9]
+; AVX512F-FAST-NEXT:    vmovdqa 96(%r9), %xmm11
+; AVX512F-FAST-NEXT:    vmovdqa 96(%r8), %xmm7
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm10 = xmm7[0],xmm11[0],xmm7[1],xmm11[1],xmm7[2],xmm11[2],xmm7[3],xmm11[3]
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm1
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm5, %zmm13
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm14 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm14, %zmm19, %zmm1
+; AVX512F-FAST-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [0,13,4,0,0,14,5,0,0,13,4,0,0,14,5,0]
+; AVX512F-FAST-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpermd %zmm1, %zmm5, %zmm19
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm25, %zmm19
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm19
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %xmm12
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %xmm8
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm1 = xmm8[0],xmm12[0],xmm8[1],xmm12[1],xmm8[2],xmm12[2],xmm8[3],xmm12[3]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
+; AVX512F-FAST-NEXT:    vpshufb %xmm2, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm9[14,15,12,13,u,u,u,u,u,u,u,u,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm9 = ymm15[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm15 = ymm9[0],ymm2[1],ymm9[2,3],ymm2[4],ymm9[5,6,7,8],ymm2[9],ymm9[10,11],ymm2[12],ymm9[13,14,15]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm1, %zmm31, %zmm15
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} ymm31 = [6,7,3,3,7,7,6,7]
+; AVX512F-FAST-NEXT:    vpermd %ymm30, %ymm31, %ymm1
+; AVX512F-FAST-NEXT:    vpbroadcastd 96(%rax), %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm21
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm13, %zmm30, %zmm21
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm24, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm26, %xmm1
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm1[4],xmm2[4],xmm1[5],xmm2[5],xmm1[6],xmm2[6],xmm1[7],xmm2[7]
+; AVX512F-FAST-NEXT:    vprold $16, %xmm26, %xmm1
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm2 = xmm24[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm3 = xmm2[0,1],xmm1[2],xmm2[3,4],xmm1[5],xmm2[6,7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,14,15,12,13,u,u,u,u,30,31,28,29,u,u,u,u,30,31,28,29]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm2 = ymm17[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm1 = ymm2[0,1,2],ymm1[3],ymm2[4,5],ymm1[6],ymm2[7,8,9,10],ymm1[11],ymm2[12,13],ymm1[14],ymm2[15]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %xmm5
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %xmm4
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm13 = xmm5[0],xmm4[0],xmm5[1],xmm4[1],xmm5[2],xmm4[2],xmm5[3],xmm4[3]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm28, %xmm6
+; AVX512F-FAST-NEXT:    vpshufb %xmm6, %xmm13, %xmm13
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm2 = [2,1,3,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm13, %zmm2, %zmm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,0,2,1,8,8,9,11]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm9 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm9[4],xmm0[4],xmm9[5],xmm0[5],xmm9[6],xmm0[6],xmm9[7],xmm0[7]
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} xmm24 = [6,7,4,5,0,0,8,9,6,7,4,5,0,0,8,9]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm24, %xmm13
+; AVX512F-FAST-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm9 = xmm9[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm9[0],xmm0[1],xmm9[2,3],xmm0[4],xmm9[5,6],xmm0[7]
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm2[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm2, %zmm27, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm25 = [65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535,65535,0,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm3, %zmm25, %zmm0
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm2 = xmm7[4],xmm11[4],xmm7[5],xmm11[5],xmm7[6],xmm11[6],xmm7[7],xmm11[7]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $184, %ymm3, %ymm30, %ymm7
+; AVX512F-FAST-NEXT:    vextracti64x4 $1, %zmm3, %ymm11
+; AVX512F-FAST-NEXT:    vpshufb %xmm6, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm22, %xmm13
+; AVX512F-FAST-NEXT:    vpshufb %xmm13, %xmm10, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,1,1,3,8,8,9,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm2, %zmm28, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufhw {{.*#+}} ymm2 = ymm6[0,1,2,3,5,5,7,6,8,9,10,11,13,13,15,14]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm26 = ymm2[3,3,3,3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25,22,23,22,23,u,u,u,u,u,u,u,u]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm29 = ymm2[2,2,2,2]
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm15, %zmm1
+; AVX512F-FAST-NEXT:    vpbroadcastd 100(%rax), %ymm2
+; AVX512F-FAST-NEXT:    vpbroadcastd 104(%rax), %ymm6
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm2, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm3, %zmm22, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm18, %ymm3
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm3 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,26,27,28,29,26,27,28,29,26,27,28,29,30,31,30,31]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} ymm6 = ymm16[3,3,3,3,7,7,7,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm10 = ymm3[0,1],ymm6[2],ymm3[3,4],ymm6[5],ymm3[6,7,8,9],ymm6[10],ymm3[11,12],ymm6[13],ymm3[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %xmm3
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} xmm6 = xmm3[0],xmm15[0],xmm3[1],xmm15[1],xmm3[2],xmm15[2],xmm3[3],xmm15[3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm9 = xmm6[0,1,2,3,6,7,4,5,6,7,4,5,12,13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm16 = [2,2,2,3,8,8,8,9]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm9, %zmm16, %zmm10
+; AVX512F-FAST-NEXT:    vpermd %ymm14, %ymm31, %ymm9
+; AVX512F-FAST-NEXT:    vpbroadcastd 32(%rax), %ymm18
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm18, %zmm9, %zmm9
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm10, %zmm30, %zmm9
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm9
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm2
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm0 = xmm4[4],xmm5[4],xmm4[5],xmm5[5],xmm4[6],xmm5[6],xmm4[7],xmm5[7]
+; AVX512F-FAST-NEXT:    vprold $16, %xmm4, %xmm4
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm5 = xmm5[1,1,2,3]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm4 = xmm5[0,1],xmm4[2],xmm5[3,4],xmm4[5],xmm5[6,7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm10 = [4,5,2,3,4,5,6,7,8,9,10,11,10,11,8,9]
+; AVX512F-FAST-NEXT:    vpshufb %xmm10, %xmm0, %xmm0
+; AVX512F-FAST-NEXT:    vpermt2q %zmm0, %zmm17, %zmm4
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm24, %xmm0
+; AVX512F-FAST-NEXT:    vpshufb %xmm0, %xmm12, %xmm0
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm5 = xmm8[4],xmm12[4],xmm8[5],xmm12[5],xmm8[6],xmm12[6],xmm8[7],xmm12[7]
+; AVX512F-FAST-NEXT:    vpshufd {{.*#+}} xmm8 = xmm8[1,1,2,2]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} xmm0 = xmm8[0],xmm0[1],xmm8[2,3],xmm0[4],xmm8[5,6],xmm0[7]
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm5 = xmm5[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermt2q %zmm5, %zmm27, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm0
+; AVX512F-FAST-NEXT:    vpunpckhwd {{.*#+}} xmm3 = xmm3[4],xmm15[4],xmm3[5],xmm15[5],xmm3[6],xmm15[6],xmm3[7],xmm15[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm3 = xmm3[0,1,2,3,4,5,4,5,6,7,10,11,8,9,10,11]
+; AVX512F-FAST-NEXT:    vpshufb %xmm13, %xmm6, %xmm4
+; AVX512F-FAST-NEXT:    vpermt2q %zmm3, %zmm28, %zmm4
+; AVX512F-FAST-NEXT:    vpbroadcastd 36(%rax), %ymm3
+; AVX512F-FAST-NEXT:    vpbroadcastd 40(%rax), %ymm5
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm5, %zmm3, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm4, %zmm22, %zmm3
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm0, %zmm1, %zmm3
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %ymm7, %ymm0, %ymm29
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogd $226, 124(%r8){1to8}, %ymm0, %ymm11
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %ymm11, %ymm0, %ymm26
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm26, %zmm0, %zmm1
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm29[0,1,2,3],zmm1[4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm12
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpckhwd (%rsp), %xmm1, %xmm1 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm1 = xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm4 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm4, %xmm4 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm4 = xmm4[4],mem[4],xmm4[5],mem[5],xmm4[6],mem[6],xmm4[7],mem[7]
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm5 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpckhwd {{[-0-9]+}}(%r{{[sb]}}p), %xmm5, %xmm5 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm5 = xmm5[4],mem[4],xmm5[5],mem[5],xmm5[6],mem[6],xmm5[7],mem[7]
+; AVX512F-FAST-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vpshufb %xmm10, %xmm5, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm6 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpunpcklwd {{.*#+}} ymm6 = ymm6[0,0,1,1,2,2,3,3,8,8,9,9,10,10,11,11]
+; AVX512F-FAST-NEXT:    vpshufd $212, {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    # ymm7 = mem[0,1,1,3,4,5,5,7]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm6 = ymm7[0,1],ymm6[2],ymm7[3,4],ymm6[5],ymm7[6,7,8,9],ymm6[10],ymm7[11,12],ymm6[13],ymm7[14,15]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm7 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18,19,20,21,18,19,20,21,24,25,26,27,22,23,22,23]
+; AVX512F-FAST-NEXT:    vpshufd $85, {{[-0-9]+}}(%r{{[sb]}}p), %ymm8 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    # ymm8 = mem[1,1,1,1,5,5,5,5]
+; AVX512F-FAST-NEXT:    vpblendw {{.*#+}} ymm7 = ymm7[0,1],ymm8[2],ymm7[3,4],ymm8[5],ymm7[6,7,8,9],ymm8[10],ymm7[11,12],ymm8[13],ymm7[14,15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm8 = [0,1,4,5,4,5,5,7]
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpermd %ymm10, %ymm8, %ymm8
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[12,13],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[14,15],zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm10[16,17],zero,zero
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm11 = [65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535,65535,0,65535,65535,65535,65535,65535]
+; AVX512F-FAST-NEXT:    vpandn %ymm8, %ymm11, %ymm8
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm8, %zmm10, %zmm8
+; AVX512F-FAST-NEXT:    vpshuflw $248, {{[-0-9]+}}(%r{{[sb]}}p), %xmm10 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm10 = mem[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,2,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,0,1,3]
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm4 = xmm4[0,2,3,3,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,2,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[0,0,1,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,1,3,2]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,2,3]
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm11, %zmm13
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm10 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm1 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm10, %zmm25, %zmm1
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm4 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm5, %zmm5 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm25, %zmm5
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535,65535,65535,65535,0,0,0,65535]
+; AVX512F-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm1 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $226, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4, %zmm5 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm4, %zmm4
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm6, %zmm6
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm4, %zmm6
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm4 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm8 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm6, %zmm8
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm8, 320(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm3, 256(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm9, 192(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm19, 128(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm20, (%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm23, 448(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm2, 704(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm21, 640(%rax)
+; AVX512F-FAST-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vmovaps %zmm0, 576(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm13, 384(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm5, 64(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm1, 512(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm12, 832(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm4, 768(%rax)
+; AVX512F-FAST-NEXT:    addq $1496, %rsp # imm = 0x5D8
+; AVX512F-FAST-NEXT:    vzeroupper
+; AVX512F-FAST-NEXT:    retq
 ;
 ; AVX512BW-LABEL: store_i16_stride7_vf64:
 ; AVX512BW:       # %bb.0:
@@ -14524,11 +12288,15 @@ define void @store_i16_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
+; AVX512DQ-FAST: {{.*}}
 ; AVX512DQ-ONLY: {{.*}}
+; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
 ; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-ONLY: {{.*}}
+; AVX512F-ONLY-FAST: {{.*}}
+; AVX512F-ONLY-SLOW: {{.*}}
 ; FALLBACK0: {{.*}}
 ; FALLBACK1: {{.*}}
 ; FALLBACK10: {{.*}}

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-6.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-6.ll
index e68da022c1863f0..6ef479b87541972 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-6.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-6.ll
@@ -1442,565 +1442,285 @@ define void @store_i64_stride6_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: store_i64_stride6_vf16:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm8
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
-; AVX512F-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
-; AVX512F-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
-; AVX512F-ONLY-NEXT:    movb $12, %r10b
-; AVX512F-ONLY-NEXT:    kmovw %r10d, %k1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    movb $16, %r10b
-; AVX512F-ONLY-NEXT:    kmovw %r10d, %k2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r9), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm16
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
-; AVX512F-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
-; AVX512F-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
-; AVX512F-ONLY-NEXT:    movb $48, %r9b
-; AVX512F-ONLY-NEXT:    kmovw %r9d, %k2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
-; AVX512F-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
-; AVX512F-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
-; AVX512F-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdx), %xmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdx), %xmm21
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
-; AVX512F-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
-; AVX512F-ONLY-NEXT:    # ymm22 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
-; AVX512F-ONLY-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdi), %ymm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm25
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, 256(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, 320(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, 448(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 512(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, 576(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, 640(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, 704(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, 384(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: store_i64_stride6_vf16:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm8
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
-; AVX512DQ-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
-; AVX512DQ-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
-; AVX512DQ-ONLY-NEXT:    movb $12, %r10b
-; AVX512DQ-ONLY-NEXT:    kmovw %r10d, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    movb $16, %r10b
-; AVX512DQ-ONLY-NEXT:    kmovw %r10d, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r9), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm16
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
-; AVX512DQ-ONLY-NEXT:    movb $48, %r9b
-; AVX512DQ-ONLY-NEXT:    kmovw %r9d, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
-; AVX512DQ-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdx), %xmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdx), %xmm21
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # ymm22 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
-; AVX512DQ-ONLY-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdi), %ymm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm25
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, 256(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, 320(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, 448(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 512(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, 576(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, 640(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, 704(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, 384(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: store_i64_stride6_vf16:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm8
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
-; AVX512BW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
-; AVX512BW-ONLY-NEXT:    movb $12, %r10b
-; AVX512BW-ONLY-NEXT:    kmovd %r10d, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    movb $16, %r10b
-; AVX512BW-ONLY-NEXT:    kmovd %r10d, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm16
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
-; AVX512BW-ONLY-NEXT:    movb $48, %r9b
-; AVX512BW-ONLY-NEXT:    kmovd %r9d, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
-; AVX512BW-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdx), %xmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdx), %xmm21
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # ymm22 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
-; AVX512BW-ONLY-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %ymm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm25
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, 256(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, 320(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, 448(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 512(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, 576(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, 640(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, 704(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, 384(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: store_i64_stride6_vf16:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm13
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm5
+; AVX512F-NEXT:    vmovdqa64 (%rsi), %zmm6
+; AVX512F-NEXT:    vmovdqa64 64(%rsi), %zmm14
+; AVX512F-NEXT:    vmovdqa64 64(%rdx), %zmm7
+; AVX512F-NEXT:    vmovdqa64 (%rdx), %zmm2
+; AVX512F-NEXT:    vmovdqa64 (%rcx), %zmm4
+; AVX512F-NEXT:    vmovdqa64 64(%rcx), %zmm11
+; AVX512F-NEXT:    vmovdqa64 (%r8), %zmm3
+; AVX512F-NEXT:    vmovdqa64 64(%r8), %zmm8
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
+; AVX512F-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
+; AVX512F-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
+; AVX512F-NEXT:    movb $12, %r10b
+; AVX512F-NEXT:    kmovw %r10d, %k1
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
+; AVX512F-NEXT:    movb $16, %r10b
+; AVX512F-NEXT:    kmovw %r10d, %k2
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
+; AVX512F-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
+; AVX512F-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqa64 (%r9), %zmm10
+; AVX512F-NEXT:    vmovdqa64 64(%r9), %zmm16
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
+; AVX512F-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
+; AVX512F-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
+; AVX512F-NEXT:    movb $48, %r9b
+; AVX512F-NEXT:    kmovw %r9d, %k2
+; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
+; AVX512F-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
+; AVX512F-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
+; AVX512F-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
+; AVX512F-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
+; AVX512F-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
+; AVX512F-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
+; AVX512F-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
+; AVX512F-NEXT:    vmovdqa64 (%rdx), %xmm20
+; AVX512F-NEXT:    vmovdqa64 64(%rdx), %xmm21
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
+; AVX512F-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
+; AVX512F-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
+; AVX512F-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
+; AVX512F-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
+; AVX512F-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
+; AVX512F-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
+; AVX512F-NEXT:    # ymm22 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
+; AVX512F-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
+; AVX512F-NEXT:    vmovdqa (%rdi), %ymm11
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %ymm25
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
+; AVX512F-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
+; AVX512F-NEXT:    vmovdqa64 %zmm17, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 128(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 192(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm12, 256(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, 320(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm15, 448(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 512(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm0, 576(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm9, 640(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm13, 704(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm19, 384(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm18, (%rax)
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: store_i64_stride6_vf16:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
-; AVX512DQBW-ONLY-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
-; AVX512DQBW-ONLY-NEXT:    movb $12, %r10b
-; AVX512DQBW-ONLY-NEXT:    kmovd %r10d, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    movb $16, %r10b
-; AVX512DQBW-ONLY-NEXT:    kmovd %r10d, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm16
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
-; AVX512DQBW-ONLY-NEXT:    movb $48, %r9b
-; AVX512DQBW-ONLY-NEXT:    kmovd %r9d, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdx), %xmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdx), %xmm21
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm22 = [7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # ymm22 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
-; AVX512DQBW-ONLY-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %ymm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm25
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, 256(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, 320(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, 448(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 512(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, 576(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, 640(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, 704(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, 384(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, (%rax)
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: store_i64_stride6_vf16:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm13
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 (%rsi), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 64(%rsi), %zmm14
+; AVX512BW-NEXT:    vmovdqa64 64(%rdx), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 (%rdx), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 (%rcx), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 64(%rcx), %zmm11
+; AVX512BW-NEXT:    vmovdqa64 (%r8), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 64(%r8), %zmm8
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [4,12,5,13,4,12,5,13]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm0, %zmm1
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [4,12,4,12]
+; AVX512BW-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm9, %zmm10
+; AVX512BW-NEXT:    movb $12, %r10b
+; AVX512BW-NEXT:    kmovd %r10d, %k1
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm1 {%k1}
+; AVX512BW-NEXT:    movb $16, %r10b
+; AVX512BW-NEXT:    kmovd %r10d, %k2
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
+; AVX512BW-NEXT:    vpermi2q %zmm14, %zmm13, %zmm0
+; AVX512BW-NEXT:    vpermi2q %zmm11, %zmm7, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 (%r9), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 64(%r9), %zmm16
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [6,14,6,14,6,14,6,14]
+; AVX512BW-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm18, %zmm15
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm12 = [5,13,6,14,5,13,6,14]
+; AVX512BW-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm12, %zmm9
+; AVX512BW-NEXT:    movb $48, %r9b
+; AVX512BW-NEXT:    kmovd %r9d, %k2
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm9 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm19 = <0,1,13,u,4,5,6,7>
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm19, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,13,4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm20, %zmm9
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm21 = [2,10,2,10,2,10,2,10]
+; AVX512BW-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm21, %zmm22
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm17 = [1,9,2,10,1,9,2,10]
+; AVX512BW-NEXT:    # zmm17 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm17, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm15 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm22 = <0,1,9,u,4,5,6,7>
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm22, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,9,4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm23, %zmm15
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm5, %zmm18
+; AVX512BW-NEXT:    vpermi2q %zmm4, %zmm2, %zmm12
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm12 {%k2}
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm19, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm20, %zmm12
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm5, %zmm21
+; AVX512BW-NEXT:    vpermi2q %zmm4, %zmm2, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm17 {%k2}
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm22, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm23, %zmm17
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm19 = [0,8,1,9,0,8,1,9]
+; AVX512BW-NEXT:    # zmm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm19, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 (%rdx), %xmm20
+; AVX512BW-NEXT:    vmovdqa64 64(%rdx), %xmm21
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm20 = xmm20[0],mem[0]
+; AVX512BW-NEXT:    vinserti32x4 $1, %xmm20, %ymm0, %ymm20
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm20, %zmm0, %zmm18 {%k1}
+; AVX512BW-NEXT:    vinserti32x4 $2, (%r8), %zmm18, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [0,1,2,3,4,8,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm20, %zmm18
+; AVX512BW-NEXT:    vpermi2q %zmm14, %zmm13, %zmm19
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm21 = xmm21[0],mem[0]
+; AVX512BW-NEXT:    vinserti32x4 $1, %xmm21, %ymm0, %ymm21
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm21, %zmm0, %zmm19 {%k1}
+; AVX512BW-NEXT:    vinserti32x4 $2, 64(%r8), %zmm19, %zmm19
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm20, %zmm19
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm20 = [7,15,7,15,7,15,7,15]
+; AVX512BW-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm20, %zmm21
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm22 = [7,15,7,15]
+; AVX512BW-NEXT:    # ymm22 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm22, %zmm13
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm13 = zmm13[0,1,2,3],zmm21[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm14 = <14,u,2,3,4,5,15,u>
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm14, %zmm13
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [0,14,2,3,4,5,6,15]
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm21, %zmm13
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm23 = [0,1,2,3,4,12,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm23, %zmm0
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm24 = [3,11,3,11,3,11,3,11]
+; AVX512BW-NEXT:    # zmm24 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm24, %zmm7
+; AVX512BW-NEXT:    vmovdqa (%rdi), %ymm11
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %ymm25
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm25 = ymm25[1],mem[1],ymm25[3],mem[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm25, %zmm7, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm25 = <10,u,2,3,4,5,11,u>
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm25, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm8 = [0,10,2,3,4,5,6,11]
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm8, %zmm7
+; AVX512BW-NEXT:    vpermi2q %zmm4, %zmm2, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm22, %zmm5
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm5 = zmm5[0,1,2,3],zmm20[4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm14, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm21, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm23, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm24, %zmm2
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm4 = ymm11[1],mem[1],ymm11[3],mem[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm2, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm25, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm10, %zmm8, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 128(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 192(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, 256(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, 320(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, 448(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 512(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, 576(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, 640(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, 704(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, 384(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, (%rax)
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %in.vec0 = load <16 x i64>, ptr %in.vecptr0, align 64
   %in.vec1 = load <16 x i64>, ptr %in.vecptr1, align 64
   %in.vec2 = load <16 x i64>, ptr %in.vecptr2, align 64
@@ -3429,1121 +3149,563 @@ define void @store_i64_stride6_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: store_i64_stride6_vf32:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    subq $712, %rsp # imm = 0x2C8
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm25
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
-; AVX512F-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
-; AVX512F-ONLY-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
-; AVX512F-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
-; AVX512F-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
-; AVX512F-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
-; AVX512F-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
-; AVX512F-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
-; AVX512F-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
-; AVX512F-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm30
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
-; AVX512F-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm7
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    movb $12, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
-; AVX512F-ONLY-NEXT:    movb $48, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r8), %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r9), %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    movb $16, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm23
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, 960(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 896(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, 832(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, 704(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, 640(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, 576(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, 512(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, 448(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 320(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, 256(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, 768(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, 384(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, (%rax)
-; AVX512F-ONLY-NEXT:    addq $712, %rsp # imm = 0x2C8
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: store_i64_stride6_vf32:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    subq $712, %rsp # imm = 0x2C8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm25
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
-; AVX512DQ-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
-; AVX512DQ-ONLY-NEXT:    # ymm20 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
-; AVX512DQ-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
-; AVX512DQ-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm30
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    movb $12, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
-; AVX512DQ-ONLY-NEXT:    movb $48, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r8), %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r9), %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    movb $16, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm23
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, 960(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 896(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, 832(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, 704(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, 640(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, 576(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, 512(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, 448(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 320(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, 256(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, 768(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, 384(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, (%rax)
-; AVX512DQ-ONLY-NEXT:    addq $712, %rsp # imm = 0x2C8
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: store_i64_stride6_vf32:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $712, %rsp # imm = 0x2C8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm25
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
-; AVX512BW-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
-; AVX512BW-ONLY-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
-; AVX512BW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
-; AVX512BW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm7
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    movb $12, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
-; AVX512BW-ONLY-NEXT:    movb $48, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %ymm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    movb $16, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm23
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, 960(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 896(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, 832(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, 704(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, 640(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, 576(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, 512(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, 448(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 320(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, 256(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, 768(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 384(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rax)
-; AVX512BW-ONLY-NEXT:    addq $712, %rsp # imm = 0x2C8
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: store_i64_stride6_vf32:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    subq $712, %rsp # imm = 0x2C8
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm4
+; AVX512F-NEXT:    vmovdqa64 128(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 192(%rdi), %zmm18
+; AVX512F-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512F-NEXT:    vmovdqa64 64(%rsi), %zmm1
+; AVX512F-NEXT:    vmovdqa64 128(%rsi), %zmm19
+; AVX512F-NEXT:    vmovdqa64 192(%rsi), %zmm21
+; AVX512F-NEXT:    vmovdqa64 (%rdx), %zmm8
+; AVX512F-NEXT:    vmovdqa64 64(%rdx), %zmm6
+; AVX512F-NEXT:    vmovdqa64 128(%rdx), %zmm5
+; AVX512F-NEXT:    vmovdqa64 192(%rdx), %zmm12
+; AVX512F-NEXT:    vmovdqa64 (%rcx), %zmm29
+; AVX512F-NEXT:    vmovdqa64 64(%rcx), %zmm27
+; AVX512F-NEXT:    vmovdqa64 128(%rcx), %zmm26
+; AVX512F-NEXT:    vmovdqa64 192(%rcx), %zmm25
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
+; AVX512F-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
+; AVX512F-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
+; AVX512F-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
+; AVX512F-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm31
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
+; AVX512F-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
+; AVX512F-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm18, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
+; AVX512F-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
+; AVX512F-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
+; AVX512F-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
+; AVX512F-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm27
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm19, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
+; AVX512F-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
+; AVX512F-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
+; AVX512F-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
+; AVX512F-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm30
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
+; AVX512F-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
+; AVX512F-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm7
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
+; AVX512F-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
+; AVX512F-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    movb $12, %al
+; AVX512F-NEXT:    kmovw %eax, %k1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
+; AVX512F-NEXT:    movb $48, %al
+; AVX512F-NEXT:    kmovw %eax, %k2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
+; AVX512F-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
+; AVX512F-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
+; AVX512F-NEXT:    vmovdqa64 (%r8), %zmm23
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
+; AVX512F-NEXT:    vmovdqa64 64(%r8), %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
+; AVX512F-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
+; AVX512F-NEXT:    vmovdqa64 128(%r8), %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
+; AVX512F-NEXT:    vmovdqa64 192(%r8), %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
+; AVX512F-NEXT:    vmovdqa64 (%r9), %zmm20
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 64(%r9), %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
+; AVX512F-NEXT:    vmovdqa64 128(%r9), %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
+; AVX512F-NEXT:    vmovdqa64 192(%r9), %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
+; AVX512F-NEXT:    vmovdqa (%rdx), %xmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
+; AVX512F-NEXT:    vmovdqa 64(%rdx), %xmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
+; AVX512F-NEXT:    vmovdqa 128(%rdx), %xmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
+; AVX512F-NEXT:    vmovdqa 192(%rdx), %xmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
+; AVX512F-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
+; AVX512F-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
+; AVX512F-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
+; AVX512F-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
+; AVX512F-NEXT:    vmovdqa (%rdi), %ymm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    movb $16, %al
+; AVX512F-NEXT:    kmovw %eax, %k1
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
+; AVX512F-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
+; AVX512F-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %ymm23
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
+; AVX512F-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
+; AVX512F-NEXT:    vmovdqa 128(%rdi), %ymm1
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
+; AVX512F-NEXT:    vmovdqa 192(%rdi), %ymm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm9, 960(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 896(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm15, 832(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm29, 704(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm14, 640(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm25, 576(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm23, 512(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm31, 448(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 320(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm27, 256(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm11, 192(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm0, 128(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, 768(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm6, 384(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm24, (%rax)
+; AVX512F-NEXT:    addq $712, %rsp # imm = 0x2C8
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: store_i64_stride6_vf32:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $712, %rsp # imm = 0x2C8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm25
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm20 = [4,12,4,12]
-; AVX512DQBW-ONLY-NEXT:    # ymm20 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
-; AVX512DQBW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm19 = [7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # ymm19 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    movb $12, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
-; AVX512DQBW-ONLY-NEXT:    movb $48, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdx), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    movb $16, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %ymm23
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, 960(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 896(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, 832(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, 704(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, 640(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, 576(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, 512(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, 448(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 320(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, 256(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, 768(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 384(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, (%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $712, %rsp # imm = 0x2C8
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: store_i64_stride6_vf32:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $712, %rsp # imm = 0x2C8
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm18
+; AVX512BW-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 64(%rsi), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 128(%rsi), %zmm19
+; AVX512BW-NEXT:    vmovdqa64 192(%rsi), %zmm21
+; AVX512BW-NEXT:    vmovdqa64 (%rdx), %zmm8
+; AVX512BW-NEXT:    vmovdqa64 64(%rdx), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 128(%rdx), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 192(%rdx), %zmm12
+; AVX512BW-NEXT:    vmovdqa64 (%rcx), %zmm29
+; AVX512BW-NEXT:    vmovdqa64 64(%rcx), %zmm27
+; AVX512BW-NEXT:    vmovdqa64 128(%rcx), %zmm26
+; AVX512BW-NEXT:    vmovdqa64 192(%rcx), %zmm25
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm11 = [4,12,5,13,4,12,5,13]
+; AVX512BW-NEXT:    # zmm11 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm20 = [4,12,4,12]
+; AVX512BW-NEXT:    # ymm20 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm25, %zmm20, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm20, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm20, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm29, %zmm8, %zmm20
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm10 = [1,9,2,10,1,9,2,10]
+; AVX512BW-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm13 = [5,13,6,14,5,13,6,14]
+; AVX512BW-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm13, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm31
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm10, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm13, %zmm14
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm10, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm13, %zmm16
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm28 = [3,11,3,11,3,11,3,11]
+; AVX512BW-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm0 = [7,15,7,15,7,15,7,15]
+; AVX512BW-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm0, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm29
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm28, %zmm29
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm0, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm28, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm0, %zmm5
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm11, %zmm26
+; AVX512BW-NEXT:    vpermi2q %zmm25, %zmm12, %zmm10
+; AVX512BW-NEXT:    vpermi2q %zmm25, %zmm12, %zmm13
+; AVX512BW-NEXT:    vpermi2q %zmm25, %zmm12, %zmm28
+; AVX512BW-NEXT:    vpermt2q %zmm25, %zmm0, %zmm12
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm27
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm11, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm11, %zmm25
+; AVX512BW-NEXT:    vpermi2q %zmm22, %zmm7, %zmm11
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [2,10,2,10,2,10,2,10]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm3, %zmm23
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [6,14,6,14,6,14,6,14]
+; AVX512BW-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm6, %zmm24
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,8,1,9,0,8,1,9]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm30
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm2, %zmm30
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm19 = [7,15,7,15]
+; AVX512BW-NEXT:    # ymm19 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm19, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm3, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm6, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm2, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm19, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm6, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm7
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm19, %zmm27
+; AVX512BW-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm18, %zmm3
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm18, %zmm6
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm19, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 %zmm18, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    movb $12, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm26 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm9 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm25 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, %zmm11 {%k1}
+; AVX512BW-NEXT:    movb $48, %al
+; AVX512BW-NEXT:    kmovd %eax, %k2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, %zmm17 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm31 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm14 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm15 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 (%r8), %zmm23
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm16 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 64(%r8), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm10 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,9,u,4,5,6,7>
+; AVX512BW-NEXT:    vpermt2q %zmm23, %zmm0, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm0, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 128(%r8), %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm0, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 192(%r8), %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm0, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 (%r9), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,9,4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm0, %zmm5
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 64(%r9), %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm0, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 128(%r9), %zmm19
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm0, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 192(%r9), %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm0, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm13 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = <0,1,13,u,4,5,6,7>
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm23, %zmm0, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm0, %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm0, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm0, %zmm13
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,13,4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm0, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm0, %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm0, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm0, %zmm13
+; AVX512BW-NEXT:    vmovdqa (%rdx), %xmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm30 {%k1}
+; AVX512BW-NEXT:    vmovdqa 64(%rdx), %xmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm8 {%k1}
+; AVX512BW-NEXT:    vmovdqa 128(%rdx), %xmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm7 {%k1}
+; AVX512BW-NEXT:    vmovdqa 192(%rdx), %xmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm0 = xmm0[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm0, %ymm0, %ymm0
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm0, %zmm2 {%k1}
+; AVX512BW-NEXT:    vinserti32x4 $2, (%r8), %zmm30, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [0,1,2,3,4,8,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm0, %zmm24
+; AVX512BW-NEXT:    vinserti32x4 $2, 64(%r8), %zmm8, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm0, %zmm6
+; AVX512BW-NEXT:    vinserti32x4 $2, 128(%r8), %zmm7, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm0, %zmm5
+; AVX512BW-NEXT:    vinserti32x4 $2, 192(%r8), %zmm2, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm0, %zmm4
+; AVX512BW-NEXT:    vmovdqa (%rdi), %ymm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2, %zmm2 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm2 = zmm2[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    movb $16, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm11 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = <10,u,2,3,4,5,11,u>
+; AVX512BW-NEXT:    vpermt2q %zmm23, %zmm7, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = <14,u,2,3,4,5,15,u>
+; AVX512BW-NEXT:    vpermt2q %zmm23, %zmm17, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %ymm23
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm23 = ymm23[1],mem[1],ymm23[3],mem[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm23, %zmm29, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 (%rsp), %zmm8 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm29 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm29 = zmm8[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm25 {%k1}
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm7, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm17, %zmm29
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %ymm1
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm1[1],mem[1],ymm1[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm8, %zmm30 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm30 = zmm8[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm9 {%k1}
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm7, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm17, %zmm30
+; AVX512BW-NEXT:    vmovdqa 192(%rdi), %ymm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm28, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm7, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm26 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 {{.*#+}} zmm7 = zmm7[0,1,2,3],zmm12[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = [0,10,2,3,4,5,6,11]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm17, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm17 = [0,1,2,3,4,12,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm17, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm22 = [0,14,2,3,4,5,6,15]
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm22, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm28, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm17, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm22, %zmm29
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm28, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm17, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm22, %zmm30
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm28, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm17, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm22, %zmm7
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 1472(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, 1408(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, 1344(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, 1280(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, 1216(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, 1088(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, 1024(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, 960(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 896(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, 832(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, 704(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, 640(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, 576(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, 512(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, 448(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 320(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, 256(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, 192(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, 128(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, 1152(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, 768(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 384(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, (%rax)
+; AVX512BW-NEXT:    addq $712, %rsp # imm = 0x2C8
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %in.vec0 = load <32 x i64>, ptr %in.vecptr0, align 64
   %in.vec1 = load <32 x i64>, ptr %in.vecptr1, align 64
   %in.vec2 = load <32 x i64>, ptr %in.vecptr2, align 64
@@ -7516,2713 +6678,1359 @@ define void @store_i64_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: store_i64_stride6_vf64:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    subq $3400, %rsp # imm = 0xD48
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm21
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
-; AVX512F-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
-; AVX512F-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
-; AVX512F-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
-; AVX512F-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
-; AVX512F-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm11
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
-; AVX512F-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
-; AVX512F-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
-; AVX512F-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
-; AVX512F-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
-; AVX512F-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
-; AVX512F-ONLY-NEXT:    # ymm23 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm30
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    movb $12, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
-; AVX512F-ONLY-NEXT:    movb $48, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r8), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm9
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm11
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm16
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm2
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    movb $16, %al
-; AVX512F-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa 384(%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r9), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 384(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rdx), %xmm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512F-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
-; AVX512F-ONLY-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm8, 2944(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
-; AVX512F-ONLY-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm6, 2752(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm6, 2560(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm5, 2368(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm5, 2176(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm4, 1984(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm3, 1600(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm2, 1216(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1024(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, 960(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, 896(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm1, 832(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, 704(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm1, 640(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm15, 576(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, 512(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, (%rax)
-; AVX512F-ONLY-NEXT:    addq $3400, %rsp # imm = 0xD48
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: store_i64_stride6_vf64:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    subq $3400, %rsp # imm = 0xD48
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm21
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
-; AVX512DQ-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
-; AVX512DQ-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm11
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
-; AVX512DQ-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
-; AVX512DQ-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # ymm23 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm30
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    movb $12, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
-; AVX512DQ-ONLY-NEXT:    movb $48, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r8), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm9
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm11
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm2
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    movb $16, %al
-; AVX512DQ-ONLY-NEXT:    kmovw %eax, %k2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa 384(%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r9), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 384(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rdx), %xmm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm8, 2944(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm6, 2752(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm6, 2560(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm5, 2368(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm5, 2176(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm4, 1984(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm3, 1600(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm2, 1216(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1024(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, 960(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, 896(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm1, 832(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, 704(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm1, 640(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm15, 576(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, 512(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, (%rax)
-; AVX512DQ-ONLY-NEXT:    addq $3400, %rsp # imm = 0xD48
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: store_i64_stride6_vf64:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $3400, %rsp # imm = 0xD48
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm21
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
-; AVX512BW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
-; AVX512BW-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm11
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
-; AVX512BW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
-; AVX512BW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
-; AVX512BW-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # ymm23 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm30
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    movb $12, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
-; AVX512BW-ONLY-NEXT:    movb $48, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm9
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm16
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm2
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    movb $16, %al
-; AVX512BW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rdi), %ymm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa 384(%rdi), %ymm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 384(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rdx), %xmm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm8, 2944(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm6, 2752(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm6, 2560(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm5, 2368(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm5, 2176(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm4, 1984(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm3, 1600(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm2, 1216(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1024(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, 960(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, 896(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm1, 832(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, 704(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm1, 640(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm15, 576(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, 512(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, (%rax)
-; AVX512BW-ONLY-NEXT:    addq $3400, %rsp # imm = 0xD48
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: store_i64_stride6_vf64:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    subq $3400, %rsp # imm = 0xD48
+; AVX512F-NEXT:    vmovdqa64 (%rdx), %zmm13
+; AVX512F-NEXT:    vmovdqa64 64(%rdx), %zmm12
+; AVX512F-NEXT:    vmovdqa64 128(%rdx), %zmm11
+; AVX512F-NEXT:    vmovdqa64 192(%rdx), %zmm10
+; AVX512F-NEXT:    vmovdqa64 256(%rdx), %zmm9
+; AVX512F-NEXT:    vmovdqa64 320(%rdx), %zmm8
+; AVX512F-NEXT:    vmovdqa64 384(%rdx), %zmm7
+; AVX512F-NEXT:    vmovdqa64 448(%rdx), %zmm6
+; AVX512F-NEXT:    vmovdqa64 (%rcx), %zmm0
+; AVX512F-NEXT:    vmovdqa64 64(%rcx), %zmm1
+; AVX512F-NEXT:    vmovdqa64 128(%rcx), %zmm2
+; AVX512F-NEXT:    vmovdqa64 192(%rcx), %zmm30
+; AVX512F-NEXT:    vmovdqa64 256(%rcx), %zmm27
+; AVX512F-NEXT:    vmovdqa64 320(%rcx), %zmm24
+; AVX512F-NEXT:    vmovdqa64 384(%rcx), %zmm22
+; AVX512F-NEXT:    vmovdqa64 448(%rcx), %zmm21
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
+; AVX512F-NEXT:    # ymm3 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
+; AVX512F-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm4
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
+; AVX512F-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
+; AVX512F-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
+; AVX512F-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
+; AVX512F-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
+; AVX512F-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
+; AVX512F-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
+; AVX512F-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%rdi), %zmm4
+; AVX512F-NEXT:    vmovdqa64 448(%rsi), %zmm11
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
+; AVX512F-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 384(%rsi), %zmm13
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %zmm3
+; AVX512F-NEXT:    vmovdqa64 320(%rsi), %zmm15
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 256(%rdi), %zmm31
+; AVX512F-NEXT:    vmovdqa64 256(%rsi), %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 192(%rdi), %zmm29
+; AVX512F-NEXT:    vmovdqa64 192(%rsi), %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 128(%rdi), %zmm25
+; AVX512F-NEXT:    vmovdqa64 128(%rsi), %zmm19
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm5
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm6
+; AVX512F-NEXT:    vmovdqa64 64(%rsi), %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512F-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512F-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
+; AVX512F-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
+; AVX512F-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
+; AVX512F-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
+; AVX512F-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
+; AVX512F-NEXT:    # ymm23 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm20
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm30
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm15
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
+; AVX512F-NEXT:    vmovdqa64 %zmm15, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
+; AVX512F-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
+; AVX512F-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
+; AVX512F-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    movb $12, %al
+; AVX512F-NEXT:    kmovw %eax, %k1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
+; AVX512F-NEXT:    movb $48, %al
+; AVX512F-NEXT:    kmovw %eax, %k2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
+; AVX512F-NEXT:    vmovdqa64 (%r8), %zmm5
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 64(%r8), %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
+; AVX512F-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 128(%r8), %zmm9
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
+; AVX512F-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 192(%r8), %zmm11
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
+; AVX512F-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 256(%r8), %zmm13
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
+; AVX512F-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 320(%r8), %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
+; AVX512F-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 384(%r8), %zmm16
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
+; AVX512F-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%r8), %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
+; AVX512F-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
+; AVX512F-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm20, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
+; AVX512F-NEXT:    vmovdqa64 %zmm18, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
+; AVX512F-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
+; AVX512F-NEXT:    vmovdqa (%rdi), %ymm2
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa 64(%rdi), %ymm2
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    movb $16, %al
+; AVX512F-NEXT:    kmovw %eax, %k2
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 128(%rdi), %ymm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 192(%rdi), %ymm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 256(%rdi), %ymm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa 320(%rdi), %ymm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
+; AVX512F-NEXT:    vmovdqa 384(%rdi), %ymm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
+; AVX512F-NEXT:    vmovdqa 448(%rdi), %ymm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm11
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
+; AVX512F-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm13
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
+; AVX512F-NEXT:    vmovdqa64 (%r9), %zmm6
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 64(%r9), %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 128(%r9), %zmm2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 192(%r9), %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 256(%r9), %zmm4
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
+; AVX512F-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 320(%r9), %zmm5
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 384(%r9), %zmm8
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%r9), %zmm9
+; AVX512F-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
+; AVX512F-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
+; AVX512F-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
+; AVX512F-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
+; AVX512F-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
+; AVX512F-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
+; AVX512F-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa (%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
+; AVX512F-NEXT:    vmovdqa 64(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
+; AVX512F-NEXT:    vmovdqa 128(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
+; AVX512F-NEXT:    vmovdqa 192(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
+; AVX512F-NEXT:    vmovdqa 256(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
+; AVX512F-NEXT:    vmovdqa 320(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
+; AVX512F-NEXT:    vmovdqa 384(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
+; AVX512F-NEXT:    vmovdqa 448(%rdx), %xmm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512F-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
+; AVX512F-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
+; AVX512F-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
+; AVX512F-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
+; AVX512F-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
+; AVX512F-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
+; AVX512F-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
+; AVX512F-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm8, 2944(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
+; AVX512F-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm6, 2752(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm6, 2560(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm5, 2368(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm5, 2176(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm4, 1984(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1792(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm3, 1600(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1408(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm2, 1216(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1024(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm12, 960(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm13, 896(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm1, 832(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm14, 704(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm1, 640(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm15, 576(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm16, 512(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 448(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 320(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 256(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm28, 192(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 128(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 768(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 384(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, (%rax)
+; AVX512F-NEXT:    addq $3400, %rsp # imm = 0xD48
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: store_i64_stride6_vf64:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $3400, %rsp # imm = 0xD48
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
-; AVX512DQBW-ONLY-NEXT:    # ymm3 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
-; AVX512DQBW-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm23 = [7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # ymm23 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    movb $12, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
-; AVX512DQBW-ONLY-NEXT:    movb $48, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm9
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm2
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    movb $16, %al
-; AVX512DQBW-ONLY-NEXT:    kmovd %eax, %k2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rdi), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 384(%rdi), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 384(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rdx), %xmm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm8, 2944(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm6, 2752(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm6, 2560(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm5, 2368(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm5, 2176(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm4, 1984(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm3, 1600(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm2, 1216(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1024(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, 960(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, 896(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm1, 832(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, 704(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm1, 640(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm15, 576(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, 512(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm28, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, (%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $3400, %rsp # imm = 0xD48
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: store_i64_stride6_vf64:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $3400, %rsp # imm = 0xD48
+; AVX512BW-NEXT:    vmovdqa64 (%rdx), %zmm13
+; AVX512BW-NEXT:    vmovdqa64 64(%rdx), %zmm12
+; AVX512BW-NEXT:    vmovdqa64 128(%rdx), %zmm11
+; AVX512BW-NEXT:    vmovdqa64 192(%rdx), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 256(%rdx), %zmm9
+; AVX512BW-NEXT:    vmovdqa64 320(%rdx), %zmm8
+; AVX512BW-NEXT:    vmovdqa64 384(%rdx), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 448(%rdx), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 (%rcx), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 64(%rcx), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 128(%rcx), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 192(%rcx), %zmm30
+; AVX512BW-NEXT:    vmovdqa64 256(%rcx), %zmm27
+; AVX512BW-NEXT:    vmovdqa64 320(%rcx), %zmm24
+; AVX512BW-NEXT:    vmovdqa64 384(%rcx), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 448(%rcx), %zmm21
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm3 = [4,12,4,12]
+; AVX512BW-NEXT:    # ymm3 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm30, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm3, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm0, %zmm13, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [1,9,2,10,1,9,2,10]
+; AVX512BW-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm4
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm5 = [5,13,6,14,5,13,6,14]
+; AVX512BW-NEXT:    # zmm5 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm5, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm4, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [3,11,3,11,3,11,3,11]
+; AVX512BW-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [7,15,7,15,7,15,7,15]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm3, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm3, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm3, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm30, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm30, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm30, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm30, %zmm3, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm3, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm3, %zmm8
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm3, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 %zmm7, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm6, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm6, %zmm5
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm21, %zmm6, %zmm14
+; AVX512BW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm21, %zmm3, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 448(%rsi), %zmm11
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm28 = [4,12,5,13,4,12,5,13]
+; AVX512BW-NEXT:    # zmm28 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 384(%rsi), %zmm13
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm28, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 320(%rsi), %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 256(%rdi), %zmm31
+; AVX512BW-NEXT:    vmovdqa64 256(%rsi), %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm29
+; AVX512BW-NEXT:    vmovdqa64 192(%rsi), %zmm18
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm25
+; AVX512BW-NEXT:    vmovdqa64 128(%rsi), %zmm19
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 64(%rsi), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm28, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512BW-NEXT:    vpermi2q %zmm22, %zmm7, %zmm28
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm12 = [2,10,2,10,2,10,2,10]
+; AVX512BW-NEXT:    # zmm12 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm12, %zmm14
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm10 = [6,14,6,14,6,14,6,14]
+; AVX512BW-NEXT:    # zmm10 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm10, %zmm16
+; AVX512BW-NEXT:    vbroadcasti64x4 {{.*#+}} zmm21 = [0,8,1,9,0,8,1,9]
+; AVX512BW-NEXT:    # zmm21 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm23 = [7,15,7,15]
+; AVX512BW-NEXT:    # ymm23 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm23, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm12, %zmm22
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm10, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm23, %zmm6
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm12, %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm10, %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm23, %zmm25
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm19
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm12, %zmm19
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm10, %zmm27
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm18, %zmm23, %zmm29
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm12, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm10, %zmm9
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm23, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm12, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm30
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm10, %zmm30
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm15, %zmm23, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm15
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm12, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm21, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm23, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm11, %zmm4, %zmm12
+; AVX512BW-NEXT:    vpermi2q %zmm11, %zmm4, %zmm10
+; AVX512BW-NEXT:    vpermi2q %zmm11, %zmm4, %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm23, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    movb $12, %al
+; AVX512BW-NEXT:    kmovd %eax, %k1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm8 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm5 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm1 {%k1}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm28 {%k1}
+; AVX512BW-NEXT:    movb $48, %al
+; AVX512BW-NEXT:    kmovd %eax, %k2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm11 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm23 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm13 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, %zmm22 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, %zmm14 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm20 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, %zmm20 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm16 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm19 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm19 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm24 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm24 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm18 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm26 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm26 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm17 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm17 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm27 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm27 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 (%rsp), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm15 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 (%r8), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,9,u,4,5,6,7>
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm2, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 64(%r8), %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm2, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 128(%r8), %zmm9
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm2, %zmm14
+; AVX512BW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 192(%r8), %zmm11
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm2, %zmm16
+; AVX512BW-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 256(%r8), %zmm13
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm2, %zmm24
+; AVX512BW-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 320(%r8), %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm2, %zmm26
+; AVX512BW-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 384(%r8), %zmm16
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm2, %zmm27
+; AVX512BW-NEXT:    vmovdqu64 %zmm27, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%r8), %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 %zmm15, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm15 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <0,1,13,u,4,5,6,7>
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm2, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm2, %zmm22
+; AVX512BW-NEXT:    vmovdqu64 %zmm22, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm2, %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm2, %zmm19
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm2, %zmm18
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm2, %zmm17
+; AVX512BW-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm2, %zmm15
+; AVX512BW-NEXT:    vmovdqa (%rdi), %ymm2
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm10
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm7, %zmm7 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm7 = zmm7[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa 64(%rdi), %ymm2
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm2 = ymm2[1],mem[1],ymm2[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm0, %zmm0
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6, %zmm6 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm6 = zmm6[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    movb $16, %al
+; AVX512BW-NEXT:    kmovd %eax, %k2
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm1 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = <10,u,2,3,4,5,11,u>
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm2 = <14,u,2,3,4,5,15,u>
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm2, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %ymm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm3
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm25, %zmm6 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm6 = zmm25[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm1, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm2, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm6, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 192(%rdi), %ymm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm3 = ymm3[1],mem[1],ymm3[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm0, %zmm0
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm29, %zmm3 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm3 = zmm29[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm4 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm2, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 256(%rdi), %ymm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm3
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm31, %zmm4 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm4 = zmm31[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm1, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm13, %zmm2, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa 320(%rdi), %ymm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm27
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm25 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm25 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm8 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm8, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm1, %zmm27
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm2, %zmm25
+; AVX512BW-NEXT:    vmovdqa 384(%rdi), %ymm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm22
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm20 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm20 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm1, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm2, %zmm20
+; AVX512BW-NEXT:    vmovdqa 448(%rdi), %ymm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm0[1],mem[1],ymm0[3],mem[3]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm3, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm1, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm11
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm1, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vshufi64x2 $228, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm17 # 64-byte Folded Reload
+; AVX512BW-NEXT:    # zmm17 = zmm0[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm0 {%k2}
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm2, %zmm17
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm2, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm13
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k2}
+; AVX512BW-NEXT:    vmovdqa64 (%r9), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,9,4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm10, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 64(%r9), %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 128(%r9), %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm10, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 192(%r9), %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm10, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 256(%r9), %zmm4
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm10, %zmm5
+; AVX512BW-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 320(%r9), %zmm5
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 384(%r9), %zmm8
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%r9), %zmm9
+; AVX512BW-NEXT:    vmovdqu64 (%rsp), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [0,1,2,13,4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm10, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm10, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm10, %zmm24
+; AVX512BW-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm10, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 %zmm19, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm10, %zmm26
+; AVX512BW-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm10, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa (%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm12 {%k1}
+; AVX512BW-NEXT:    vmovdqa 64(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm14 {%k1}
+; AVX512BW-NEXT:    vmovdqa 128(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm16 {%k1}
+; AVX512BW-NEXT:    vmovdqa 192(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm30 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm30 {%k1}
+; AVX512BW-NEXT:    vmovdqa 256(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm31 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm31 {%k1}
+; AVX512BW-NEXT:    vmovdqa 320(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm1 {%k1}
+; AVX512BW-NEXT:    vmovdqa 384(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm0 {%k1}
+; AVX512BW-NEXT:    vmovdqa 448(%rdx), %xmm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} xmm10 = xmm10[0],mem[0]
+; AVX512BW-NEXT:    vinserti128 $1, %xmm10, %ymm0, %ymm10
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm0, %zmm21 {%k1}
+; AVX512BW-NEXT:    vinserti32x4 $2, (%r8), %zmm12, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,1,2,3,4,8,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm29, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti32x4 $2, 64(%r8), %zmm14, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm29, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti32x4 $2, 128(%r8), %zmm16, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm29, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vinserti32x4 $2, 192(%r8), %zmm30, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm29, %zmm26
+; AVX512BW-NEXT:    vinserti32x4 $2, 256(%r8), %zmm31, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm29, %zmm24
+; AVX512BW-NEXT:    vinserti32x4 $2, 320(%r8), %zmm1, %zmm23
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm29, %zmm23
+; AVX512BW-NEXT:    vinserti32x4 $2, 384(%r8), %zmm0, %zmm19
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm29, %zmm19
+; AVX512BW-NEXT:    vinserti32x4 $2, 448(%r8), %zmm21, %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm29, %zmm21
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm29 = [0,10,2,3,4,5,6,11]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm29, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm11, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm30 = [0,1,2,3,4,12,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm30, %zmm28
+; AVX512BW-NEXT:    vmovdqa64 {{.*#+}} zmm31 = [0,14,2,3,4,5,6,15]
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm31, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm16 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm29, %zmm16
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm15 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm30, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm14 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm31, %zmm14
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm29, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm30, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm31, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm29, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm30, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm31, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm29, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm30, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm31, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm29, %zmm27
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm30, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm31, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm29, %zmm22
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm30, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm31, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm29, %zmm18
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm30, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm31, %zmm17
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, 3008(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm8 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm8, 2944(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 2880(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm18, 2816(%rax)
+; AVX512BW-NEXT:    vmovups (%rsp), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm6, 2752(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, 2624(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm6, 2560(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, 2496(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, 2432(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm5, 2368(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, 2240(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm5, 2176(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, 2112(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, 2048(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm4, 1984(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, 1856(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1792(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, 1728(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 1664(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm3, 1600(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 1472(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1408(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 1344(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, 1280(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm2, 1216(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, 1088(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1024(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, 960(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, 896(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm1, 832(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, 704(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm1, 640(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm15, 576(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, 512(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 448(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 320(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 256(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm28, 192(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 128(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, 2688(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, 2304(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, 1920(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, 1536(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, 1152(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 768(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 384(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rax)
+; AVX512BW-NEXT:    addq $3400, %rsp # imm = 0xD48
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
   %in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
   %in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
@@ -10249,14 +8057,18 @@ define void @store_i64_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512-FAST: {{.*}}
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
+; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-FAST: {{.*}}
+; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}
 ; AVX512F-SLOW: {{.*}}
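
The `; AVX512BW-ONLY: {{.*}}` style lines added at the bottom of this test are the update script's placeholders for RUN-line check prefixes that no longer carry any prefix-specific checks; since `{{.*}}` matches anything, FileCheck's unused-prefix check still passes. A minimal sketch of the pattern (a hypothetical file, not from this commit; the prefix names here are made up):

  ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512f | FileCheck %s --check-prefixes=CHECK,F-ONLY
  ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512f,+avx512dq | FileCheck %s --check-prefixes=CHECK,DQ-ONLY
  define i64 @id(i64 %x) {
    ret i64 %x
  }
  ; Both runs produce identical asm, so only the shared prefix carries checks:
  ; CHECK-LABEL: id:
  ; CHECK: retq
  ; Placeholders so neither per-run prefix counts as unused:
  ; F-ONLY: {{.*}}
  ; DQ-ONLY: {{.*}}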

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-7.ll
index 1e2a9d022f66c76..0de7beea9398af3 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-7.ll
@@ -2949,8 +2949,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQ-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
 ; AVX512DQ-SLOW-NEXT:    # ymm1 = mem[0,1,0,1]
 ; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQ-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm16 = [15,7,15,7]
-; AVX512DQ-SLOW-NEXT:    # ymm16 = mem[0,1,0,1]
+; AVX512DQ-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [15,7,15,7]
+; AVX512DQ-SLOW-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm4, %zmm10
 ; AVX512DQ-SLOW-NEXT:    vpermt2q %zmm13, %zmm16, %zmm10
 ; AVX512DQ-SLOW-NEXT:    vpermi2q %zmm7, %zmm6, %zmm16
@@ -3156,8 +3156,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQ-FAST-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm7, %zmm17
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm9, %zmm20, %zmm17
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm21 = [6,14,6,14]
-; AVX512DQ-FAST-NEXT:    # ymm21 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm21 = [6,14,6,14]
+; AVX512DQ-FAST-NEXT:    # ymm21 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm3, %zmm18
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm5, %zmm21, %zmm18
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 (%r8), %zmm11
@@ -3215,8 +3215,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQ-FAST-NEXT:    vpermi2q %zmm22, %zmm14, %zmm28
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm1, %zmm29
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm16, %zmm24, %zmm29
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm26 = [15,7,15,7]
-; AVX512DQ-FAST-NEXT:    # ymm26 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [15,7,15,7]
+; AVX512DQ-FAST-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm13, %zmm22
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm0, %zmm26, %zmm22
 ; AVX512DQ-FAST-NEXT:    movb $24, %dil
@@ -3791,8 +3791,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQBW-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
 ; AVX512DQBW-SLOW-NEXT:    # ymm1 = mem[0,1,0,1]
 ; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm6, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm16 = [15,7,15,7]
-; AVX512DQBW-SLOW-NEXT:    # ymm16 = mem[0,1,0,1]
+; AVX512DQBW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [15,7,15,7]
+; AVX512DQBW-SLOW-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm4, %zmm10
 ; AVX512DQBW-SLOW-NEXT:    vpermt2q %zmm13, %zmm16, %zmm10
 ; AVX512DQBW-SLOW-NEXT:    vpermi2q %zmm7, %zmm6, %zmm16
@@ -3998,8 +3998,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQBW-FAST-NEXT:    # zmm20 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm7, %zmm17
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm9, %zmm20, %zmm17
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm21 = [6,14,6,14]
-; AVX512DQBW-FAST-NEXT:    # ymm21 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm21 = [6,14,6,14]
+; AVX512DQBW-FAST-NEXT:    # ymm21 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm3, %zmm18
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm5, %zmm21, %zmm18
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 (%r8), %zmm11
@@ -4057,8 +4057,8 @@ define void @store_i64_stride7_vf16(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQBW-FAST-NEXT:    vpermi2q %zmm22, %zmm14, %zmm28
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm1, %zmm29
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm16, %zmm24, %zmm29
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm26 = [15,7,15,7]
-; AVX512DQBW-FAST-NEXT:    # ymm26 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [15,7,15,7]
+; AVX512DQBW-FAST-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm13, %zmm22
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm0, %zmm26, %zmm22
 ; AVX512DQBW-FAST-NEXT:    movb $24, %dil
@@ -15820,13 +15820,13 @@ define void @store_i64_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm0, %zmm1
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm26, %zmm21, %zmm1
 ; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm30 = [6,14,6,14]
-; AVX512DQ-FAST-NEXT:    # ymm30 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [6,14,6,14]
+; AVX512DQ-FAST-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm0, %zmm1
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm26, %zmm30, %zmm1
 ; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm18 = [15,7,15,7]
-; AVX512DQ-FAST-NEXT:    # ymm18 = mem[0,1,0,1]
+; AVX512DQ-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm18 = [15,7,15,7]
+; AVX512DQ-FAST-NEXT:    # ymm18 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-FAST-NEXT:    vpermt2q %zmm26, %zmm18, %zmm0
 ; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
 ; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
@@ -19661,13 +19661,13 @@ define void @store_i64_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm0, %zmm1
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm26, %zmm21, %zmm1
 ; AVX512DQBW-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm30 = [6,14,6,14]
-; AVX512DQBW-FAST-NEXT:    # ymm30 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [6,14,6,14]
+; AVX512DQBW-FAST-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vmovdqa64 %zmm0, %zmm1
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm26, %zmm30, %zmm1
 ; AVX512DQBW-FAST-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm18 = [15,7,15,7]
-; AVX512DQBW-FAST-NEXT:    # ymm18 = mem[0,1,0,1]
+; AVX512DQBW-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm18 = [15,7,15,7]
+; AVX512DQBW-FAST-NEXT:    # ymm18 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQBW-FAST-NEXT:    vpermt2q %zmm26, %zmm18, %zmm0
 ; AVX512DQBW-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
 ; AVX512DQBW-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
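
Every hunk in this file changes the encoding of the constant load but not its value: vbroadcasti64x2 repeats the 16-byte constant as two qwords (hence the `mem[0,1,0,1]` shuffle comment), while vbroadcasti32x4 repeats the same 16 bytes as four dwords (`mem[0,1,2,3,0,1,2,3]`), so ymm16/ymm18/ymm21/ymm26/ymm30 hold identical bits either way. A rough reduced .ll that produces this kind of repeated-128-bit index constant (hypothetical, not one of the modified tests; whether the lowering picks vpermi2q with the index materialized via vbroadcasti32x4, as in the checks above, depends on the -mattr feature set):

  ; RUN: llc < %s -mtriple=x86_64-- -mattr=+avx512f,+avx512vl | FileCheck %s
  ; The [15,7,15,7] index constant repeats a 128-bit pattern, so it can be
  ; loaded with a 128-bit subvector broadcast instead of a full-width load.
  define <4 x i64> @select_15_7(<8 x i64> %a, <8 x i64> %b) {
    %s = shufflevector <8 x i64> %a, <8 x i64> %b, <4 x i32> <i32 15, i32 7, i32 15, i32 7>
    ret <4 x i64> %s
  }

Because both broadcast forms read the same 16 bytes from the constant pool, the churn in this file is purely the expected mnemonic and the widened shuffle comment.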

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-8.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-8.ll
index 6778ae0647ae3f0..8bad8e79ae36168 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-8.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i64-stride-8.ll
@@ -8876,3957 +8876,1981 @@ define void @store_i64_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX2-ONLY-NEXT:    vzeroupper
 ; AVX2-ONLY-NEXT:    retq
 ;
-; AVX512F-ONLY-LABEL: store_i64_stride8_vf64:
-; AVX512F-ONLY:       # %bb.0:
-; AVX512F-ONLY-NEXT:    subq $5512, %rsp # imm = 0x1588
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r8), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm25
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r9), %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%r10), %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%r10), %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 (%rax), %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 64(%rax), %zmm16
-; AVX512F-ONLY-NEXT:    movb $-64, %r11b
-; AVX512F-ONLY-NEXT:    kmovw %r11d, %k1
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
-; AVX512F-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
-; AVX512F-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
-; AVX512F-ONLY-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
-; AVX512F-ONLY-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
-; AVX512F-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
-; AVX512F-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
-; AVX512F-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
-; AVX512F-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
-; AVX512F-ONLY-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm15
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%r10), %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rax), %zmm14
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r10), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rax), %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm29
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%r10), %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rax), %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm24
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm6
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm6
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%r10), %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rax), %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm4
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm5
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%r10), %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rax), %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%r10), %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rax), %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm1
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
-; AVX512F-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
-; AVX512F-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
-; AVX512F-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
-; AVX512F-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm29
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm27
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm21
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm28
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm20
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
-; AVX512F-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
-; AVX512F-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512F-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa (%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdi), %ymm7
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rsi), %ymm2
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm3
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm7
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm7
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm7
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %ymm23
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 384(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 384(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 384(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %ymm18
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rcx), %ymm0
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rdx), %ymm1
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rsi), %ymm3
-; AVX512F-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm10
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
-; AVX512F-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512F-ONLY-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
-; AVX512F-ONLY-NEXT:    vmovdqa (%rdi), %xmm2
-; AVX512F-ONLY-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rsi), %xmm1
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
-; AVX512F-ONLY-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rsi), %xmm1
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
-; AVX512F-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm4
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rsi), %xmm1
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
-; AVX512F-ONLY-NEXT:    vmovdqa 192(%rdi), %xmm12
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rsi), %xmm12
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
-; AVX512F-ONLY-NEXT:    vmovdqa 256(%rdi), %xmm12
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa 320(%rsi), %xmm13
-; AVX512F-ONLY-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
-; AVX512F-ONLY-NEXT:    vmovdqa64 320(%rdi), %xmm18
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rsi), %xmm18
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 384(%rdi), %xmm25
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rsi), %xmm18
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
-; AVX512F-ONLY-NEXT:    vmovdqa64 448(%rdi), %xmm25
-; AVX512F-ONLY-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
-; AVX512F-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
-; AVX512F-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
-; AVX512F-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512F-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
-; AVX512F-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1664(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1216(%rax)
-; AVX512F-ONLY-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1152(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 704(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 640(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 192(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 4032(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3968(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3904(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3840(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3520(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3456(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3392(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3328(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 3008(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2944(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2880(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2816(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2496(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2432(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2368(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 2304(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1984(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1920(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1856(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1472(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1344(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 1280(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 960(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 896(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 832(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm11, 576(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm10, 512(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512F-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512F-ONLY-NEXT:    vmovdqa64 %zmm3, (%rax)
-; AVX512F-ONLY-NEXT:    addq $5512, %rsp # imm = 0x1588
-; AVX512F-ONLY-NEXT:    vzeroupper
-; AVX512F-ONLY-NEXT:    retq
-;
-; AVX512DQ-ONLY-LABEL: store_i64_stride8_vf64:
-; AVX512DQ-ONLY:       # %bb.0:
-; AVX512DQ-ONLY-NEXT:    subq $5512, %rsp # imm = 0x1588
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r8), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm25
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r9), %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%r10), %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%r10), %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 (%rax), %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 64(%rax), %zmm16
-; AVX512DQ-ONLY-NEXT:    movb $-64, %r11b
-; AVX512DQ-ONLY-NEXT:    kmovw %r11d, %k1
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
-; AVX512DQ-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
-; AVX512DQ-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
-; AVX512DQ-ONLY-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
-; AVX512DQ-ONLY-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
-; AVX512DQ-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
-; AVX512DQ-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
-; AVX512DQ-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
-; AVX512DQ-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
-; AVX512DQ-ONLY-NEXT:    # ymm30 = mem[0,1,0,1]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%r10), %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rax), %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r10), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rax), %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm29
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%r10), %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rax), %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm24
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm6
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%r10), %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rax), %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm4
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%r10), %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rax), %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%r10), %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rax), %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
-; AVX512DQ-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
-; AVX512DQ-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
-; AVX512DQ-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
-; AVX512DQ-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm29
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm27
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm21
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm28
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
-; AVX512DQ-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
-; AVX512DQ-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512DQ-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdi), %ymm7
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rsi), %ymm2
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm7
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm7
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm7
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %ymm23
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 384(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 384(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 384(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %ymm18
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rcx), %ymm0
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rdx), %ymm1
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rsi), %ymm3
-; AVX512DQ-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm10
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
-; AVX512DQ-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa (%rdi), %xmm2
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rsi), %xmm1
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rsi), %xmm1
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm4
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rsi), %xmm1
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
-; AVX512DQ-ONLY-NEXT:    vmovdqa 192(%rdi), %xmm12
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rsi), %xmm12
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa 256(%rdi), %xmm12
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa 320(%rsi), %xmm13
-; AVX512DQ-ONLY-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 320(%rdi), %xmm18
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rsi), %xmm18
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 384(%rdi), %xmm25
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rsi), %xmm18
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 448(%rdi), %xmm25
-; AVX512DQ-ONLY-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
-; AVX512DQ-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
-; AVX512DQ-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
-; AVX512DQ-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512DQ-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
-; AVX512DQ-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1664(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1216(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1152(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 704(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 640(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 192(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 4032(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3968(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3904(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3840(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3520(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3456(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3392(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3328(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 3008(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2944(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2880(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2816(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2496(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2432(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2368(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 2304(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1984(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1920(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1856(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1472(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1344(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 1280(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 960(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 896(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 832(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm11, 576(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm10, 512(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512DQ-ONLY-NEXT:    vmovdqa64 %zmm3, (%rax)
-; AVX512DQ-ONLY-NEXT:    addq $5512, %rsp # imm = 0x1588
-; AVX512DQ-ONLY-NEXT:    vzeroupper
-; AVX512DQ-ONLY-NEXT:    retq
-;
-; AVX512BW-ONLY-LABEL: store_i64_stride8_vf64:
-; AVX512BW-ONLY:       # %bb.0:
-; AVX512BW-ONLY-NEXT:    subq $5512, %rsp # imm = 0x1588
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm25
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%r10), %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%r10), %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 (%rax), %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 64(%rax), %zmm16
-; AVX512BW-ONLY-NEXT:    movb $-64, %r11b
-; AVX512BW-ONLY-NEXT:    kmovd %r11d, %k1
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
-; AVX512BW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
-; AVX512BW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
-; AVX512BW-ONLY-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
-; AVX512BW-ONLY-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
-; AVX512BW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
-; AVX512BW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
-; AVX512BW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
-; AVX512BW-ONLY-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%r10), %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rax), %zmm14
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r10), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rax), %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm29
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%r10), %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rax), %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm24
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm6
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm6
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%r10), %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rax), %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm4
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm5
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%r10), %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rax), %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%r10), %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rax), %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm1
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
-; AVX512BW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
-; AVX512BW-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
-; AVX512BW-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
-; AVX512BW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm29
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm27
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm21
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm28
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm20
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
-; AVX512BW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
-; AVX512BW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512BW-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %ymm7
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rsi), %ymm2
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm3
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm7
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm7
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm7
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %ymm23
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 384(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 384(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 384(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %ymm18
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rcx), %ymm0
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rdx), %ymm1
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rsi), %ymm3
-; AVX512BW-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm10
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
-; AVX512BW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
-; AVX512BW-ONLY-NEXT:    vmovdqa (%rdi), %xmm2
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rsi), %xmm1
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
-; AVX512BW-ONLY-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rsi), %xmm1
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
-; AVX512BW-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm4
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rsi), %xmm1
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
-; AVX512BW-ONLY-NEXT:    vmovdqa 192(%rdi), %xmm12
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rsi), %xmm12
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
-; AVX512BW-ONLY-NEXT:    vmovdqa 256(%rdi), %xmm12
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa 320(%rsi), %xmm13
-; AVX512BW-ONLY-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
-; AVX512BW-ONLY-NEXT:    vmovdqa64 320(%rdi), %xmm18
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rsi), %xmm18
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 384(%rdi), %xmm25
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rsi), %xmm18
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
-; AVX512BW-ONLY-NEXT:    vmovdqa64 448(%rdi), %xmm25
-; AVX512BW-ONLY-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
-; AVX512BW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
-; AVX512BW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
-; AVX512BW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512BW-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
-; AVX512BW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1664(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1216(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1152(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 704(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 640(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 192(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 4032(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3968(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3904(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3840(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3520(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3456(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3392(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3328(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 3008(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2944(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2880(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2816(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2496(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2432(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2368(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 2304(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1984(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1920(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1856(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1472(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1344(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 1280(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 960(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 896(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 832(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm11, 576(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm10, 512(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512BW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512BW-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512BW-ONLY-NEXT:    vmovdqa64 %zmm3, (%rax)
-; AVX512BW-ONLY-NEXT:    addq $5512, %rsp # imm = 0x1588
-; AVX512BW-ONLY-NEXT:    vzeroupper
-; AVX512BW-ONLY-NEXT:    retq
+; AVX512F-LABEL: store_i64_stride8_vf64:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    subq $5512, %rsp # imm = 0x1588
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %r10
+; AVX512F-NEXT:    vmovdqa64 128(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 64(%rdi), %zmm4
+; AVX512F-NEXT:    vmovdqa64 (%rdi), %zmm8
+; AVX512F-NEXT:    vmovdqa64 128(%rsi), %zmm17
+; AVX512F-NEXT:    vmovdqa64 64(%rsi), %zmm19
+; AVX512F-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512F-NEXT:    vmovdqa64 64(%rdx), %zmm5
+; AVX512F-NEXT:    vmovdqa64 (%rdx), %zmm10
+; AVX512F-NEXT:    vmovdqa64 64(%rcx), %zmm20
+; AVX512F-NEXT:    vmovdqa64 (%rcx), %zmm11
+; AVX512F-NEXT:    vmovdqa64 (%r8), %zmm1
+; AVX512F-NEXT:    vmovdqa64 64(%r8), %zmm25
+; AVX512F-NEXT:    vmovdqa64 128(%r8), %zmm23
+; AVX512F-NEXT:    vmovdqa64 (%r9), %zmm28
+; AVX512F-NEXT:    vmovdqa64 64(%r9), %zmm26
+; AVX512F-NEXT:    vmovdqa64 128(%r9), %zmm24
+; AVX512F-NEXT:    vmovdqa64 (%r10), %zmm21
+; AVX512F-NEXT:    vmovdqa64 64(%r10), %zmm14
+; AVX512F-NEXT:    vmovdqa64 (%rax), %zmm27
+; AVX512F-NEXT:    vmovdqa64 64(%rax), %zmm16
+; AVX512F-NEXT:    movb $-64, %r11b
+; AVX512F-NEXT:    kmovw %r11d, %k1
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
+; AVX512F-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
+; AVX512F-NEXT:    # ymm6 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
+; AVX512F-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
+; AVX512F-NEXT:    # ymm7 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
+; AVX512F-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm29
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
+; AVX512F-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
+; AVX512F-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
+; AVX512F-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm29, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
+; AVX512F-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm6, %zmm15
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm11
+; AVX512F-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 128(%r10), %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
+; AVX512F-NEXT:    vmovdqa64 128(%rax), %zmm14
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm25, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm11
+; AVX512F-NEXT:    vmovdqa64 128(%rdx), %zmm4
+; AVX512F-NEXT:    vmovdqa64 128(%rcx), %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 192(%r10), %zmm10
+; AVX512F-NEXT:    vmovdqa64 192(%rax), %zmm26
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
+; AVX512F-NEXT:    vmovdqa64 192(%r8), %zmm23
+; AVX512F-NEXT:    vmovdqa64 192(%r9), %zmm29
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512F-NEXT:    vmovdqa64 192(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 192(%rsi), %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
+; AVX512F-NEXT:    vmovdqa64 192(%rdx), %zmm3
+; AVX512F-NEXT:    vmovdqa64 192(%rcx), %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 256(%r10), %zmm10
+; AVX512F-NEXT:    vmovdqa64 256(%rax), %zmm22
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
+; AVX512F-NEXT:    vmovdqa64 256(%r8), %zmm0
+; AVX512F-NEXT:    vmovdqa64 256(%r9), %zmm24
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512F-NEXT:    vmovdqa64 256(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 256(%rsi), %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
+; AVX512F-NEXT:    vmovdqa64 256(%rdx), %zmm3
+; AVX512F-NEXT:    vmovdqa64 256(%rcx), %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
+; AVX512F-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm6
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %zmm2
+; AVX512F-NEXT:    vmovdqa64 320(%rsi), %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
+; AVX512F-NEXT:    vmovdqa64 320(%rdx), %zmm14
+; AVX512F-NEXT:    vmovdqa64 320(%rcx), %zmm6
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm7
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm7
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
+; AVX512F-NEXT:    vmovdqa64 384(%rdx), %zmm0
+; AVX512F-NEXT:    vmovdqa64 384(%rcx), %zmm6
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
+; AVX512F-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%rdx), %zmm0
+; AVX512F-NEXT:    vmovdqa64 448(%rcx), %zmm6
+; AVX512F-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
+; AVX512F-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
+; AVX512F-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 320(%r10), %zmm31
+; AVX512F-NEXT:    vmovdqa64 320(%rax), %zmm12
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
+; AVX512F-NEXT:    vmovdqa64 320(%r8), %zmm17
+; AVX512F-NEXT:    vmovdqa64 320(%r9), %zmm4
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm5
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
+; AVX512F-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %zmm0
+; AVX512F-NEXT:    vmovdqa64 384(%rsi), %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 384(%r10), %zmm11
+; AVX512F-NEXT:    vmovdqa64 384(%rax), %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 384(%r8), %zmm30
+; AVX512F-NEXT:    vmovdqa64 384(%r9), %zmm0
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%r10), %zmm16
+; AVX512F-NEXT:    vmovdqa64 448(%rax), %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%rdi), %zmm3
+; AVX512F-NEXT:    vmovdqa64 448(%rsi), %zmm1
+; AVX512F-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 448(%r8), %zmm19
+; AVX512F-NEXT:    vmovdqa64 448(%r9), %zmm7
+; AVX512F-NEXT:    vmovdqa64 %zmm19, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
+; AVX512F-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm5
+; AVX512F-NEXT:    vmovdqa64 %zmm19, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
+; AVX512F-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
+; AVX512F-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
+; AVX512F-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
+; AVX512F-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512F-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
+; AVX512F-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
+; AVX512F-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
+; AVX512F-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm9, %zmm13
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
+; AVX512F-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
+; AVX512F-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
+; AVX512F-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
+; AVX512F-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
+; AVX512F-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
+; AVX512F-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm29
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm17, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
+; AVX512F-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm27
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
+; AVX512F-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm21
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm28
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
+; AVX512F-NEXT:    vmovdqa64 %zmm30, %zmm20
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
+; AVX512F-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm0
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
+; AVX512F-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
+; AVX512F-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
+; AVX512F-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
+; AVX512F-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
+; AVX512F-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
+; AVX512F-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
+; AVX512F-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
+; AVX512F-NEXT:    vmovdqa (%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa (%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa (%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa (%rdi), %ymm7
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
+; AVX512F-NEXT:    vmovdqa 64(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 64(%rdx), %ymm1
+; AVX512F-NEXT:    vmovdqa 64(%rsi), %ymm2
+; AVX512F-NEXT:    vmovdqa 64(%rdi), %ymm3
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
+; AVX512F-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
+; AVX512F-NEXT:    vmovdqa 128(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 128(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 128(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa 128(%rdi), %ymm7
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512F-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
+; AVX512F-NEXT:    vmovdqa 192(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 192(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 192(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa 192(%rdi), %ymm7
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
+; AVX512F-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
+; AVX512F-NEXT:    vmovdqa 256(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 256(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 256(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa 256(%rdi), %ymm7
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
+; AVX512F-NEXT:    vmovdqa 320(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 320(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 320(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %ymm23
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
+; AVX512F-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
+; AVX512F-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
+; AVX512F-NEXT:    vmovdqa 384(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 384(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 384(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %ymm18
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
+; AVX512F-NEXT:    vmovdqa 448(%rcx), %ymm0
+; AVX512F-NEXT:    vmovdqa 448(%rdx), %ymm1
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512F-NEXT:    vmovdqa 448(%rsi), %ymm3
+; AVX512F-NEXT:    vmovdqa 448(%rdi), %ymm10
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
+; AVX512F-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512F-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
+; AVX512F-NEXT:    vmovdqa (%rsi), %xmm1
+; AVX512F-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
+; AVX512F-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512F-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
+; AVX512F-NEXT:    vmovdqa 64(%rsi), %xmm1
+; AVX512F-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
+; AVX512F-NEXT:    vmovdqa 64(%rdi), %xmm4
+; AVX512F-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
+; AVX512F-NEXT:    vmovdqa 128(%rsi), %xmm1
+; AVX512F-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
+; AVX512F-NEXT:    vmovdqa 128(%rdi), %xmm4
+; AVX512F-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
+; AVX512F-NEXT:    vmovdqa 192(%rsi), %xmm1
+; AVX512F-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
+; AVX512F-NEXT:    vmovdqa 192(%rdi), %xmm12
+; AVX512F-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
+; AVX512F-NEXT:    vmovdqa 256(%rsi), %xmm12
+; AVX512F-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
+; AVX512F-NEXT:    vmovdqa 256(%rdi), %xmm12
+; AVX512F-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
+; AVX512F-NEXT:    vmovdqa 320(%rsi), %xmm13
+; AVX512F-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
+; AVX512F-NEXT:    vmovdqa64 320(%rdi), %xmm18
+; AVX512F-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
+; AVX512F-NEXT:    vmovdqa64 384(%rsi), %xmm18
+; AVX512F-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
+; AVX512F-NEXT:    vmovdqa64 384(%rdi), %xmm25
+; AVX512F-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
+; AVX512F-NEXT:    vmovdqa64 448(%rsi), %xmm18
+; AVX512F-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
+; AVX512F-NEXT:    vmovdqa64 448(%rdi), %xmm25
+; AVX512F-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
+; AVX512F-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
+; AVX512F-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512F-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
+; AVX512F-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
+; AVX512F-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
+; AVX512F-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1664(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1216(%rax)
+; AVX512F-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1152(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 704(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 640(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 192(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 128(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 4032(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3968(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3904(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3840(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3520(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3456(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3392(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3328(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 3008(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2944(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2880(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2816(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2496(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2432(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2368(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 2304(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1984(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1920(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1856(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1792(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1472(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1408(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1344(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 1280(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 960(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 896(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 832(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 768(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm11, 576(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm10, 512(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 448(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 384(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 320(%rax)
+; AVX512F-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-NEXT:    vmovaps %zmm0, 256(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm2, 64(%rax)
+; AVX512F-NEXT:    vmovdqa64 %zmm3, (%rax)
+; AVX512F-NEXT:    addq $5512, %rsp # imm = 0x1588
+; AVX512F-NEXT:    vzeroupper
+; AVX512F-NEXT:    retq
 ;
-; AVX512DQBW-ONLY-LABEL: store_i64_stride8_vf64:
-; AVX512DQBW-ONLY:       # %bb.0:
-; AVX512DQBW-ONLY-NEXT:    subq $5512, %rsp # imm = 0x1588
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdi), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdi), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rsi), %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rsi), %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rsi), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rdx), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rdx), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rcx), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rcx), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r8), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r8), %zmm25
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r8), %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r9), %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r9), %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r9), %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%r10), %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%r10), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 (%rax), %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 64(%rax), %zmm16
-; AVX512DQBW-ONLY-NEXT:    movb $-64, %r11b
-; AVX512DQBW-ONLY-NEXT:    kmovd %r11d, %k1
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
-; AVX512DQBW-ONLY-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
-; AVX512DQBW-ONLY-NEXT:    # ymm6 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
-; AVX512DQBW-ONLY-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
-; AVX512DQBW-ONLY-NEXT:    # ymm7 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
-; AVX512DQBW-ONLY-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm29, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti64x2 {{.*#+}} ymm30 = [7,15,7,15]
-; AVX512DQBW-ONLY-NEXT:    # ymm30 = mem[0,1,0,1]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%r10), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rax), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm25, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rdx), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 128(%rcx), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r10), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rax), %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r8), %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%r9), %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rsi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rdx), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 192(%rcx), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%r10), %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rax), %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%r8), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%r9), %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rsi), %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rdx), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 256(%rcx), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rsi), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdx), %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rcx), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm14, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdx), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rcx), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdx), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rcx), %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%r10), %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rax), %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%r8), %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%r9), %zmm4
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rsi), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%r10), %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rax), %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%r8), %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%r9), %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%r10), %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rax), %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rsi), %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%r8), %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%r9), %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm5
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
-; AVX512DQBW-ONLY-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
-; AVX512DQBW-ONLY-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
-; AVX512DQBW-ONLY-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
-; AVX512DQBW-ONLY-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm27, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm27
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
-; AVX512DQBW-ONLY-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
-; AVX512DQBW-ONLY-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
-; AVX512DQBW-ONLY-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %ymm7
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rsi), %ymm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %ymm7
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdi), %ymm7
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rdi), %ymm7
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %ymm23
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 384(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 384(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 384(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %ymm18
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rcx), %ymm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rdx), %ymm1
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rsi), %ymm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 448(%rdi), %ymm10
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
-; AVX512DQBW-ONLY-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa (%rdi), %xmm2
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rsi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 64(%rdi), %xmm4
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rsi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 128(%rdi), %xmm4
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rsi), %xmm1
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 192(%rdi), %xmm12
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rsi), %xmm12
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 256(%rdi), %xmm12
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa 320(%rsi), %xmm13
-; AVX512DQBW-ONLY-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 320(%rdi), %xmm18
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rsi), %xmm18
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 384(%rdi), %xmm25
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rsi), %xmm18
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 448(%rdi), %xmm25
-; AVX512DQBW-ONLY-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
-; AVX512DQBW-ONLY-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
-; AVX512DQBW-ONLY-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
-; AVX512DQBW-ONLY-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
-; AVX512DQBW-ONLY-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
-; AVX512DQBW-ONLY-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1664(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1216(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1152(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 704(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 640(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 192(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 128(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 4032(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3968(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3904(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3840(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3520(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3456(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3392(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3328(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 3008(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2944(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2880(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2816(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2496(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2432(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2368(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 2304(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1984(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1920(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1856(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1792(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1472(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1408(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1344(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 1280(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 960(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 896(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 832(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 768(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm11, 576(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm10, 512(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 448(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 384(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 320(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQBW-ONLY-NEXT:    vmovaps %zmm0, 256(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm2, 64(%rax)
-; AVX512DQBW-ONLY-NEXT:    vmovdqa64 %zmm3, (%rax)
-; AVX512DQBW-ONLY-NEXT:    addq $5512, %rsp # imm = 0x1588
-; AVX512DQBW-ONLY-NEXT:    vzeroupper
-; AVX512DQBW-ONLY-NEXT:    retq
+; AVX512BW-LABEL: store_i64_stride8_vf64:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    subq $5512, %rsp # imm = 0x1588
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %r10
+; AVX512BW-NEXT:    vmovdqa64 128(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 64(%rdi), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 (%rdi), %zmm8
+; AVX512BW-NEXT:    vmovdqa64 128(%rsi), %zmm17
+; AVX512BW-NEXT:    vmovdqa64 64(%rsi), %zmm19
+; AVX512BW-NEXT:    vmovdqa64 (%rsi), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 64(%rdx), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 (%rdx), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 64(%rcx), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 (%rcx), %zmm11
+; AVX512BW-NEXT:    vmovdqa64 (%r8), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 64(%r8), %zmm25
+; AVX512BW-NEXT:    vmovdqa64 128(%r8), %zmm23
+; AVX512BW-NEXT:    vmovdqa64 (%r9), %zmm28
+; AVX512BW-NEXT:    vmovdqa64 64(%r9), %zmm26
+; AVX512BW-NEXT:    vmovdqa64 128(%r9), %zmm24
+; AVX512BW-NEXT:    vmovdqa64 (%r10), %zmm21
+; AVX512BW-NEXT:    vmovdqa64 64(%r10), %zmm14
+; AVX512BW-NEXT:    vmovdqa64 (%rax), %zmm27
+; AVX512BW-NEXT:    vmovdqa64 64(%rax), %zmm16
+; AVX512BW-NEXT:    movb $-64, %r11b
+; AVX512BW-NEXT:    kmovd %r11d, %k1
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [4,12,4,12,4,12,4,12]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm3, %zmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm12 = zmm1[0],zmm28[0],zmm1[2],zmm28[2],zmm1[4],zmm28[4],zmm1[6],zmm28[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm3, %zmm0
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm6 = [4,12,4,12]
+; AVX512BW-NEXT:    # ymm6 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm6, %zmm15
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm9 = [5,13,5,13,5,13,5,13]
+; AVX512BW-NEXT:    # zmm9 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm9, %zmm0
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm12 = zmm1[1],zmm28[1],zmm1[3],zmm28[3],zmm1[5],zmm28[5],zmm1[7],zmm28[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm12 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm9, %zmm0
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm7 = [5,13,5,13]
+; AVX512BW-NEXT:    # ymm7 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm7, %zmm15
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm0 = ymm0[0,1,2,3],ymm15[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm12, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm13 = [6,14,6,14,6,14,6,14]
+; AVX512BW-NEXT:    # zmm13 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm29
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm13, %zmm0
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm0 {%k1} = zmm21[0],zmm27[0],zmm21[2],zmm27[2],zmm21[4],zmm27[4],zmm21[6],zmm27[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm13, %zmm12
+; AVX512BW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [6,14,6,14]
+; AVX512BW-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm1, %zmm15
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm12 = ymm12[0,1,2,3],ymm15[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm12, %zmm0, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm18 = [7,15,7,15,7,15,7,15]
+; AVX512BW-NEXT:    # zmm18 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm29, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm18, %zmm12
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm12 {%k1} = zmm21[1],zmm27[1],zmm21[3],zmm27[3],zmm21[5],zmm27[5],zmm21[7],zmm27[7]
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm18, %zmm8
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm30 = [7,15,7,15]
+; AVX512BW-NEXT:    # ymm30 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm11, %zmm30, %zmm10
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm12, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm3, %zmm8
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm25[0],zmm26[0],zmm25[2],zmm26[2],zmm25[4],zmm26[4],zmm25[6],zmm26[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm3, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, %zmm15
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm6, %zmm11
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm16, %zmm9, %zmm8
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm10 = zmm25[1],zmm26[1],zmm25[3],zmm26[3],zmm25[5],zmm26[5],zmm25[7],zmm26[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, %zmm10 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm9, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm7, %zmm11
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm11[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm13, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm1, %zmm10
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm8 = ymm8[0,1,2,3],ymm10[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 %zmm25, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm26, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm13, %zmm10
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 %zmm14, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 %zmm16, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm10 {%k1} = zmm14[0],zmm16[0],zmm14[2],zmm16[2],zmm14[4],zmm16[4],zmm14[6],zmm16[6]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm8, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 128(%r10), %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm19, %zmm18, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 128(%rax), %zmm14
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm30, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm25, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm18, %zmm6
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm6 {%k1} = zmm11[1],zmm16[1],zmm11[3],zmm16[3],zmm11[5],zmm16[5],zmm11[7],zmm16[7]
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm5[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm3, %zmm4
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm5 = zmm23[0],zmm24[0],zmm23[2],zmm24[2],zmm23[4],zmm24[4],zmm23[6],zmm24[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm3, %zmm6
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 128(%rdx), %zmm4
+; AVX512BW-NEXT:    vmovdqa64 128(%rcx), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm14, %zmm9, %zmm5
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm24[1],zmm23[3],zmm24[3],zmm23[5],zmm24[5],zmm23[7],zmm24[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm9, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm13, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm6[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm24, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 %zmm14, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm14[0],zmm10[2],zmm14[2],zmm10[4],zmm14[4],zmm10[6],zmm14[6]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm17, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm30, %zmm4
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm4[4,5,6,7]
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm14[1],zmm10[3],zmm14[3],zmm10[5],zmm14[5],zmm10[7],zmm14[7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 192(%r10), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 192(%rax), %zmm26
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm11, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 192(%r8), %zmm23
+; AVX512BW-NEXT:    vmovdqa64 192(%r9), %zmm29
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm23[0],zmm29[0],zmm23[2],zmm29[2],zmm23[4],zmm29[4],zmm23[6],zmm29[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 192(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 192(%rsi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
+; AVX512BW-NEXT:    vmovdqa64 192(%rdx), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 192(%rcx), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm9, %zmm4
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm23[1],zmm29[1],zmm23[3],zmm29[3],zmm23[5],zmm29[5],zmm23[7],zmm29[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm13, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm26[0],zmm10[2],zmm26[2],zmm10[4],zmm26[4],zmm10[6],zmm26[6]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm18, %zmm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm26[1],zmm10[3],zmm26[3],zmm10[5],zmm26[5],zmm10[7],zmm26[7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 256(%r10), %zmm10
+; AVX512BW-NEXT:    vmovdqa64 256(%rax), %zmm22
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm11, %zmm2
+; AVX512BW-NEXT:    vmovdqa64 256(%r8), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 256(%r9), %zmm24
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm4 = zmm0[0],zmm24[0],zmm0[2],zmm24[2],zmm0[4],zmm24[4],zmm0[6],zmm24[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 256(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 256(%rsi), %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm11, %zmm6
+; AVX512BW-NEXT:    vmovdqa64 256(%rdx), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 256(%rcx), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm15, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm6 = ymm6[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm6, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm9, %zmm4
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm6 = zmm17[1],zmm24[1],zmm17[3],zmm24[3],zmm17[5],zmm24[5],zmm17[7],zmm24[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm6 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm9, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm12, %zmm8
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm13, %zmm4
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm6
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm4 = ymm4[0,1,2,3],ymm6[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 %zmm17, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm6
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm13, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm6 {%k1} = zmm10[0],zmm22[0],zmm10[2],zmm22[2],zmm10[4],zmm22[4],zmm10[6],zmm22[6]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm6, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm5, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm30, %zmm3
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm2 = ymm2[0,1,2,3],ymm3[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm18, %zmm3
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm3 {%k1} = zmm10[1],zmm22[1],zmm10[3],zmm22[3],zmm10[5],zmm22[5],zmm10[7],zmm22[7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm3, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %zmm2
+; AVX512BW-NEXT:    vmovdqa64 320(%rsi), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm11, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 320(%rdx), %zmm14
+; AVX512BW-NEXT:    vmovdqa64 320(%rcx), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm7
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm15, %zmm7
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm5 = ymm5[0,1,2,3],ymm7[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm7
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm12, %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm14, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm1, %zmm8
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm30, %zmm14
+; AVX512BW-NEXT:    vmovdqa64 384(%rdx), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 384(%rcx), %zmm6
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm15, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm12, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm1, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 %zmm10, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%rdx), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 448(%rcx), %zmm6
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm0, %zmm15
+; AVX512BW-NEXT:    vmovdqu64 %zmm15, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm0, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 %zmm12, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm6, %zmm0, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm6, %zmm30, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 320(%r10), %zmm31
+; AVX512BW-NEXT:    vmovdqa64 320(%rax), %zmm12
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm11, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 320(%r8), %zmm17
+; AVX512BW-NEXT:    vmovdqa64 320(%r9), %zmm4
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm10 = zmm17[0],zmm4[0],zmm17[2],zmm4[2],zmm17[4],zmm4[4],zmm17[6],zmm4[6]
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm10 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm5, %zmm10, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm9, %zmm1
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm5 = zmm17[1],zmm4[1],zmm17[3],zmm4[3],zmm17[5],zmm4[5],zmm17[7],zmm4[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm5 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm9, %zmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm7[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm13, %zmm1
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm1[0,1,2,3],ymm8[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm5
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm13, %zmm5
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm5 {%k1} = zmm31[0],zmm12[0],zmm31[2],zmm12[2],zmm31[4],zmm12[4],zmm31[6],zmm12[6]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpblendd {{.*#+}} ymm1 = ymm2[0,1,2,3],ymm14[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm18, %zmm2
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm2 {%k1} = zmm31[1],zmm12[1],zmm31[3],zmm12[3],zmm31[5],zmm12[5],zmm31[7],zmm12[7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 384(%rsi), %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm11, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm9, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm13, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm18, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 384(%r10), %zmm11
+; AVX512BW-NEXT:    vmovdqa64 384(%rax), %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm2, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm9, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 384(%r8), %zmm30
+; AVX512BW-NEXT:    vmovdqa64 384(%r9), %zmm0
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm13, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%r10), %zmm16
+; AVX512BW-NEXT:    vmovdqa64 448(%rax), %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %zmm3
+; AVX512BW-NEXT:    vmovdqa64 448(%rsi), %zmm1
+; AVX512BW-NEXT:    vpermi2q %zmm1, %zmm3, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm9, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm1, %zmm3, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 448(%r8), %zmm19
+; AVX512BW-NEXT:    vmovdqa64 448(%r9), %zmm7
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm13, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermi2q %zmm1, %zmm3, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 %zmm13, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm1, %zmm18, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm18, %zmm1
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm5
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm18, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm15 = [0,8,0,8,0,8,0,8]
+; AVX512BW-NEXT:    # zmm15 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm15, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm14 = [1,9,1,9,1,9,1,9]
+; AVX512BW-NEXT:    # zmm14 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm14, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm6 = [2,10,2,10,2,10,2,10]
+; AVX512BW-NEXT:    # zmm6 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, %zmm1
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm6, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm1 = [3,11,3,11,3,11,3,11]
+; AVX512BW-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
+; AVX512BW-NEXT:    vpermt2q %zmm27, %zmm1, %zmm21
+; AVX512BW-NEXT:    vmovdqu64 %zmm21, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm6, %zmm25
+; AVX512BW-NEXT:    vpermt2q %zmm28, %zmm1, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 (%rsp), %zmm27 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm14, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm27, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm6, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm3, %zmm1, %zmm27
+; AVX512BW-NEXT:    vmovdqu64 %zmm27, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm6, %zmm18
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm6, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm9, %zmm1, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, %zmm13
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm6, %zmm13
+; AVX512BW-NEXT:    vpermt2q %zmm2, %zmm1, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 %zmm9, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm6, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm26, %zmm1, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm6, %zmm10
+; AVX512BW-NEXT:    vpermt2q %zmm29, %zmm1, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 %zmm23, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm14, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm6, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm22, %zmm1, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm15, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm14, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm6, %zmm26
+; AVX512BW-NEXT:    vpermt2q %zmm24, %zmm1, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm14, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm6, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vpermt2q %zmm12, %zmm1, %zmm31
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm29
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm15, %zmm29
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm14, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm6, %zmm24
+; AVX512BW-NEXT:    vpermt2q %zmm4, %zmm1, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm15, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm2
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm14, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm9 # 64-byte Reload
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm9 {%k1} = zmm11[0],zmm20[0],zmm11[2],zmm20[2],zmm11[4],zmm20[4],zmm11[6],zmm20[6]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm11[1],zmm20[1],zmm11[3],zmm20[3],zmm11[5],zmm20[5],zmm11[7],zmm20[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm27
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm6, %zmm22
+; AVX512BW-NEXT:    vpermt2q %zmm20, %zmm1, %zmm11
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm21
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm15, %zmm21
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm28
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm14, %zmm28
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, %zmm20
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm6, %zmm20
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm2 = zmm30[0],zmm0[0],zmm30[2],zmm0[2],zmm30[4],zmm0[4],zmm30[6],zmm0[6]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm3 = zmm30[1],zmm0[1],zmm30[3],zmm0[3],zmm30[5],zmm0[5],zmm30[7],zmm0[7]
+; AVX512BW-NEXT:    vpermt2q %zmm0, %zmm1, %zmm30
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm15, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm0
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm14, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm4 {%k1} = zmm16[0],zmm8[0],zmm16[2],zmm8[2],zmm16[4],zmm8[4],zmm16[6],zmm8[6]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm5 {%k1} = zmm16[1],zmm8[1],zmm16[3],zmm8[3],zmm16[5],zmm8[5],zmm16[7],zmm8[7]
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm6, %zmm12
+; AVX512BW-NEXT:    vpermt2q %zmm8, %zmm1, %zmm16
+; AVX512BW-NEXT:    vpermi2q %zmm7, %zmm19, %zmm15
+; AVX512BW-NEXT:    vpermi2q %zmm7, %zmm19, %zmm14
+; AVX512BW-NEXT:    vpermi2q %zmm7, %zmm19, %zmm6
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} zmm8 = zmm19[0],zmm7[0],zmm19[2],zmm7[2],zmm19[4],zmm7[4],zmm19[6],zmm7[6]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} zmm0 = zmm19[1],zmm7[1],zmm19[3],zmm7[3],zmm19[5],zmm7[5],zmm19[7],zmm7[7]
+; AVX512BW-NEXT:    vpermt2q %zmm7, %zmm1, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, %zmm2 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm2, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm3 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm3, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm9, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm27, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm8 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm8, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 %zmm1, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm1, %ymm1 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm1 = ymm1[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm0 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm0, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vpblendd $240, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0, %ymm0 # 32-byte Folded Reload
+; AVX512BW-NEXT:    # ymm0 = ymm0[0,1,2,3],mem[4,5,6,7]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm25 {%k1}
+; AVX512BW-NEXT:    vmovdqa (%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa (%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa (%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa (%rdi), %ymm7
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm25, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm18 {%k1}
+; AVX512BW-NEXT:    vmovdqa 64(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 64(%rdx), %ymm1
+; AVX512BW-NEXT:    vmovdqa 64(%rsi), %ymm2
+; AVX512BW-NEXT:    vmovdqa 64(%rdi), %ymm3
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm7 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm3[0],ymm2[0],ymm3[2],ymm2[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm7 = ymm8[2,3],ymm7[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm7, %zmm18, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 %zmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 (%rsp), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm5 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm3[1],ymm2[1],ymm3[3],ymm2[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm5, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm13 {%k1}
+; AVX512BW-NEXT:    vmovdqa 128(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 128(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 128(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %ymm7
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm13, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, (%rsp) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm10 {%k1}
+; AVX512BW-NEXT:    vmovdqa 192(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 192(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 192(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa 192(%rdi), %ymm7
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, %zmm4 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm4, %zmm9
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm26 {%k1}
+; AVX512BW-NEXT:    vmovdqa 256(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 256(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 256(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa 256(%rdi), %ymm7
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm7[0],ymm3[0],ymm7[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm26, %zmm26
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm2 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm7[1],ymm3[1],ymm7[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm2, %zmm7
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, %zmm24 {%k1}
+; AVX512BW-NEXT:    vmovdqa 320(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 320(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 320(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %ymm23
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm8 = ymm23[0],ymm3[0],ymm23[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm8[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm24, %zmm8
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, %zmm17 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm23[1],ymm3[1],ymm23[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm17, %zmm17
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, %zmm20 {%k1}
+; AVX512BW-NEXT:    vmovdqa 384(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 384(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 384(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %ymm18
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm18[0],ymm3[0],ymm18[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm13[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm20, %zmm20
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm30 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm18[1],ymm3[1],ymm18[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm30, %zmm24
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm6 {%k1}
+; AVX512BW-NEXT:    vmovdqa 448(%rcx), %ymm0
+; AVX512BW-NEXT:    vmovdqa 448(%rdx), %ymm1
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm2 = ymm1[0],ymm0[0],ymm1[2],ymm0[2]
+; AVX512BW-NEXT:    vmovdqa 448(%rsi), %ymm3
+; AVX512BW-NEXT:    vmovdqa 448(%rdi), %ymm10
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm10[0],ymm3[0],ymm10[2],ymm3[2]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm2 = ymm12[2,3],ymm2[2,3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm2, %zmm6, %zmm31
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm0 = ymm1[1],ymm0[1],ymm1[3],ymm0[3]
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm10[1],ymm3[1],ymm10[3],ymm3[3]
+; AVX512BW-NEXT:    vperm2i128 {{.*#+}} ymm0 = ymm1[2,3],ymm0[2,3]
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, %zmm19 {%k1}
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm0, %zmm19, %zmm0
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm4 {%k1}
+; AVX512BW-NEXT:    vmovdqa (%rsi), %xmm1
+; AVX512BW-NEXT:    vinserti128 $1, (%rcx), %ymm1, %ymm1
+; AVX512BW-NEXT:    vmovdqa (%rdi), %xmm2
+; AVX512BW-NEXT:    vinserti128 $1, (%rdx), %ymm2, %ymm2
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm3 = ymm2[0],ymm1[0],ymm2[2],ymm1[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm3, %zmm4, %zmm3
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm10 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, %zmm10 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm2[1],ymm1[1],ymm2[3],ymm1[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm10, %zmm2
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm11 {%k1}
+; AVX512BW-NEXT:    vmovdqa 64(%rsi), %xmm1
+; AVX512BW-NEXT:    vinserti128 $1, 64(%rcx), %ymm1, %ymm1
+; AVX512BW-NEXT:    vmovdqa 64(%rdi), %xmm4
+; AVX512BW-NEXT:    vinserti128 $1, 64(%rdx), %ymm4, %ymm4
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm10 = ymm4[0],ymm1[0],ymm4[2],ymm1[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm10, %zmm11, %zmm10
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm11 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, %zmm12 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm4[1],ymm1[1],ymm4[3],ymm1[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm12, %zmm11
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm13 {%k1}
+; AVX512BW-NEXT:    vmovdqa 128(%rsi), %xmm1
+; AVX512BW-NEXT:    vinserti128 $1, 128(%rcx), %ymm1, %ymm1
+; AVX512BW-NEXT:    vmovdqa 128(%rdi), %xmm4
+; AVX512BW-NEXT:    vinserti128 $1, 128(%rdx), %ymm4, %ymm12
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm4 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm4, %zmm13, %zmm4
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm19
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, %zmm18 {%k1}
+; AVX512BW-NEXT:    vmovdqa 192(%rsi), %xmm1
+; AVX512BW-NEXT:    vinserti128 $1, 192(%rcx), %ymm1, %ymm1
+; AVX512BW-NEXT:    vmovdqa 192(%rdi), %xmm12
+; AVX512BW-NEXT:    vinserti128 $1, 192(%rdx), %ymm12, %ymm12
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm13 = ymm12[0],ymm1[0],ymm12[2],ymm1[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm13, %zmm18, %zmm30
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm13 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm18 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, %zmm18 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm1 = ymm12[1],ymm1[1],ymm12[3],ymm1[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm1, %zmm18, %zmm1
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm12 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, %zmm5 {%k1}
+; AVX512BW-NEXT:    vmovdqa 256(%rsi), %xmm12
+; AVX512BW-NEXT:    vinserti128 $1, 256(%rcx), %ymm12, %ymm13
+; AVX512BW-NEXT:    vmovdqa 256(%rdi), %xmm12
+; AVX512BW-NEXT:    vinserti32x4 $1, 256(%rdx), %ymm12, %ymm18
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm12 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm12, %zmm5, %zmm12
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm23 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, %zmm5 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm13, %zmm5, %zmm23
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm29 {%k1}
+; AVX512BW-NEXT:    vmovdqa 320(%rsi), %xmm13
+; AVX512BW-NEXT:    vinserti128 $1, 320(%rcx), %ymm13, %ymm13
+; AVX512BW-NEXT:    vmovdqa64 320(%rdi), %xmm18
+; AVX512BW-NEXT:    vinserti32x4 $1, 320(%rdx), %ymm18, %ymm18
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm18[0],ymm13[0],ymm18[2],ymm13[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm27, %zmm29, %zmm22
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm6 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm13 = ymm18[1],ymm13[1],ymm18[3],ymm13[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm13, %zmm6, %zmm13
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm21 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 384(%rsi), %xmm18
+; AVX512BW-NEXT:    vinserti32x4 $1, 384(%rcx), %ymm18, %ymm18
+; AVX512BW-NEXT:    vmovdqa64 384(%rdi), %xmm25
+; AVX512BW-NEXT:    vinserti32x4 $1, 384(%rdx), %ymm25, %ymm25
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm27, %zmm21, %zmm16
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm28 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm18, %zmm28, %zmm21
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm15 {%k1}
+; AVX512BW-NEXT:    vmovdqa64 448(%rsi), %xmm18
+; AVX512BW-NEXT:    vinserti32x4 $1, 448(%rcx), %ymm18, %ymm18
+; AVX512BW-NEXT:    vmovdqa64 448(%rdi), %xmm25
+; AVX512BW-NEXT:    vinserti32x4 $1, 448(%rdx), %ymm25, %ymm25
+; AVX512BW-NEXT:    vpunpcklqdq {{.*#+}} ymm27 = ymm25[0],ymm18[0],ymm25[2],ymm18[2]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm27, %zmm15, %zmm6
+; AVX512BW-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm5 # 64-byte Reload
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, %zmm14 {%k1}
+; AVX512BW-NEXT:    vpunpckhqdq {{.*#+}} ymm18 = ymm25[1],ymm18[1],ymm25[3],ymm18[3]
+; AVX512BW-NEXT:    vinserti64x4 $0, %ymm18, %zmm14, %zmm5
+; AVX512BW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-NEXT:    vmovdqa64 %zmm0, 3776(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm31, 3712(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm24, 3264(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm20, 3200(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm17, 2752(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm8, 2688(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm7, 2240(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm26, 2176(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm9, 1728(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1664(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1216(%rax)
+; AVX512BW-NEXT:    vmovups (%rsp), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1152(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 704(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 640(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 192(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 128(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 4032(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3968(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3904(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3840(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm5, 3648(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm6, 3584(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3520(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3456(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3392(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3328(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm21, 3136(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm16, 3072(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 3008(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2944(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2880(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2816(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm13, 2624(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm22, 2560(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2496(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2432(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2368(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 2304(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm23, 2112(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm12, 2048(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1984(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1920(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1856(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1792(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm1, 1600(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm30, 1536(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1472(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1408(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1344(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 1280(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm19, 1088(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm4, 1024(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 960(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 896(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 832(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 768(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm11, 576(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm10, 512(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 448(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 384(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 320(%rax)
+; AVX512BW-NEXT:    vmovups {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512BW-NEXT:    vmovaps %zmm0, 256(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm2, 64(%rax)
+; AVX512BW-NEXT:    vmovdqa64 %zmm3, (%rax)
+; AVX512BW-NEXT:    addq $5512, %rsp # imm = 0x1588
+; AVX512BW-NEXT:    vzeroupper
+; AVX512BW-NEXT:    retq
   %in.vec0 = load <64 x i64>, ptr %in.vecptr0, align 64
   %in.vec1 = load <64 x i64>, ptr %in.vecptr1, align 64
   %in.vec2 = load <64 x i64>, ptr %in.vecptr2, align 64
@@ -12856,14 +10880,18 @@ define void @store_i64_stride8_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.ve
 ; AVX512-FAST: {{.*}}
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-FAST: {{.*}}
+; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
 ; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512BW-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
+; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
+; AVX512DQBW-ONLY: {{.*}}
 ; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-FAST: {{.*}}
+; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}
 ; AVX512F-SLOW: {{.*}}

diff  --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-5.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-5.ll
index f101f22c58b1f13..f4fda97c0817a8d 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-5.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-5.ll
@@ -4241,185 +4241,185 @@ define void @store_i8_stride5_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512F-FAST-NEXT:    vzeroupper
 ; AVX512F-FAST-NEXT:    retq
 ;
-; AVX512BW-ONLY-SLOW-LABEL: store_i8_stride5_vf64:
-; AVX512BW-ONLY-SLOW:       # %bb.0:
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm8 = [9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm8, %ymm0, %ymm2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm1[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $693250386, %eax # imm = 0x29522952
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %eax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm4, %ymm2 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm4 = xmm12[0],xmm6[0],xmm12[1],xmm6[1],xmm12[2],xmm6[2],xmm12[3],xmm6[3],xmm12[4],xmm6[4],xmm12[5],xmm6[5],xmm12[6],xmm6[6],xmm12[7],xmm6[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = <2,u,1,0,5,4,u,3,u,7,6,11,10,u,9,8>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm4, %xmm4
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm10
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %ymm4
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm15 = [11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm15, %ymm4, %ymm2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %ymm5
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm5[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $1251232404, %eax # imm = 0x4A944A94
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %eax, %k5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm9, %ymm2 {%k5}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,4,5,u,2,3,6,7,10,11,u,8,9,12,13>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm9, %xmm11, %xmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm2, %zmm2
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $1785168781326730801, %rax # imm = 0x18C6318C6318C631
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k4
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k4}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm16
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [6,6,6,6,7,7,7,7,16,16,16,16,16,16,17,17]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermi2d %zmm16, %zmm3, %zmm10
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $2380225041768974402, %rax # imm = 0x2108421084210842
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm23
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [128,128,12,13,128,128,128,128,14,128,128,128,14,15,128,128,128,128,16,128,128,128,16,17,128,128,128,128,18,128,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm23, %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [128,128,128,128,13,128,128,128,128,14,128,128,128,128,15,128,128,128,128,16,128,128,128,128,17,128,128,128,128,18,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm24, %ymm18
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm17, %ymm18, %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm20 = <128,6,128,8,u,128,7,128,9,128,11,u,128,10,128,12>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm20, %xmm12, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm22 = <6,128,8,128,u,7,128,9,128,11,128,u,10,128,12,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm22, %xmm6, %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm6, %zmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <8,128,u,7,128,9,128,u,128,u,10,128,12,128,u,11>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm19, %xmm14, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm21 = <128,8,u,128,7,128,9,u,11,u,128,10,128,12,u,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm21, %xmm13, %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %xmm12, %xmm13, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm12[0,0,1,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = <3,3,3,u,4,4,4,4>
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %ymm25, %ymm12, %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $138547332, %eax # imm = 0x8421084
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %eax, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm26, %ymm17 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm13, %zmm13
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-8330787646191410408, %rax # imm = 0x8C6318C6318C6318
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm6 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <3,3,3,3,u,4,4,4>
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %ymm16, %ymm13, %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm18 = mem[1,1,2,2]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm18[0,1,1,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm17
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $4760450083537948804, %rax # imm = 0x4210842108421084
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k6
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm17, %zmm6 {%k6}
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm17 = [19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm17, %ymm26, %ymm18
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm18[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm18 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm18, %ymm25, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm27, %ymm28, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm15, %ymm26, %ymm15
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm25 = ymm25[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm25 = ymm25[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm25, %ymm15 {%k5}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm27, %zmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,128,21,128,21,20,128,22,128,24,128,22,23,128,25]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm23, %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm26 = ymm26[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm27 = [128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm27 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm24, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm26, %ymm28, %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm8, %ymm24, %ymm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm23 = ymm23[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm23 = ymm23[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm23, %ymm8 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm26, %zmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k4}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = [4,6,5,5,5,5,4,6,6,6,6,6,7,7,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %zmm16, %zmm15, %zmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdx), %xmm16
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-8925843906633654008, %rax # imm = 0x8421084210842108
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm20, %xmm15, %xmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm22, %xmm16, %xmm22
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm20, %xmm22, %xmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm15[0],xmm16[0],xmm15[1],xmm16[1],xmm15[2],xmm16[2],xmm15[3],xmm16[3],xmm15[4],xmm16[4],xmm15[5],xmm16[5],xmm15[6],xmm16[6],xmm15[7],xmm16[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm7, %xmm15, %xmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm20, %zmm7, %zmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdi), %xmm16
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm19, %xmm16, %xmm19
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm21, %xmm15, %xmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm19, %xmm20, %xmm19
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm16[0],xmm15[0],xmm16[1],xmm15[1],xmm16[2],xmm15[2],xmm16[3],xmm15[3],xmm16[4],xmm15[4],xmm16[5],xmm15[5],xmm16[6],xmm15[6],xmm16[7],xmm15[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm9, %xmm15, %xmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm19, %zmm9, %zmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[0,0,1,1,4,4,5,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[0,0,1,1,4,4,5,5]
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-4165393823095705204, %rax # imm = 0xC6318C6318C6318C
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm7, %zmm9 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [0,0,0,0,0,0,1,1,1,1,2,2,2,2,2,2]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %zmm3, %zmm7, %zmm3
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $595056260442243600, %rax # imm = 0x842108421084210
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm9 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm1, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm7
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm11, %ymm0, %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %ymm5, %ymm12, %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm4, %ymm1 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm17, %ymm4, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm18, %ymm5, %ymm4
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,3,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm3, %ymm4, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpermd %ymm0, %ymm13, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,2,1,1,4,6,5,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,3,2]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $1190112520884487201, %rax # imm = 0x1084210842108421
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm1, 64(%r9)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm9, (%r9)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm8, 256(%r9)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm6, 192(%r9)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%r9)
-; AVX512BW-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512BW-ONLY-SLOW-NEXT:    retq
+; AVX512BW-SLOW-LABEL: store_i8_stride5_vf64:
+; AVX512BW-SLOW:       # %bb.0:
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm3
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rcx), %ymm0
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm8 = [9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm8, %ymm0, %ymm2
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdx), %ymm1
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm1[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    movl $693250386, %eax # imm = 0x29522952
+; AVX512BW-SLOW-NEXT:    kmovd %eax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm4, %ymm2 {%k1}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm6
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm12
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm4 = xmm12[0],xmm6[0],xmm12[1],xmm6[1],xmm12[2],xmm6[2],xmm12[3],xmm6[3],xmm12[4],xmm6[4],xmm12[5],xmm6[5],xmm12[6],xmm6[6],xmm12[7],xmm6[7]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = <2,u,1,0,5,4,u,3,u,7,6,11,10,u,9,8>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm7, %xmm4, %xmm4
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm10
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rsi), %ymm4
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm15 = [11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm15, %ymm4, %ymm2
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdi), %ymm5
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm5[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    movl $1251232404, %eax # imm = 0x4A944A94
+; AVX512BW-SLOW-NEXT:    kmovd %eax, %k5
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm9, %ymm2 {%k5}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm13
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm14
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,4,5,u,2,3,6,7,10,11,u,8,9,12,13>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm9, %xmm11, %xmm11
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm2, %zmm2
+; AVX512BW-SLOW-NEXT:    movabsq $1785168781326730801, %rax # imm = 0x18C6318C6318C631
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k4
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k4}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm16
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [6,6,6,6,7,7,7,7,16,16,16,16,16,16,17,17]
+; AVX512BW-SLOW-NEXT:    vpermi2d %zmm16, %zmm3, %zmm10
+; AVX512BW-SLOW-NEXT:    movabsq $2380225041768974402, %rax # imm = 0x2108421084210842
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k2
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k2}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm23
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [128,128,12,13,128,128,128,128,14,128,128,128,14,15,128,128,128,128,16,128,128,128,16,17,128,128,128,128,18,128,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm10, %ymm23, %ymm17
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm24
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [128,128,128,128,13,128,128,128,128,14,128,128,128,128,15,128,128,128,128,16,128,128,128,128,17,128,128,128,128,18,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm11, %ymm24, %ymm18
+; AVX512BW-SLOW-NEXT:    vporq %ymm17, %ymm18, %ymm17
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm20 = <128,6,128,8,u,128,7,128,9,128,11,u,128,10,128,12>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm20, %xmm12, %xmm12
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm22 = <6,128,8,128,u,7,128,9,128,11,128,u,10,128,12,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm22, %xmm6, %xmm6
+; AVX512BW-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm6, %zmm6
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <8,128,u,7,128,9,128,u,128,u,10,128,12,128,u,11>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm19, %xmm14, %xmm12
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm21 = <128,8,u,128,7,128,9,u,11,u,128,10,128,12,u,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm21, %xmm13, %xmm13
+; AVX512BW-SLOW-NEXT:    vpor %xmm12, %xmm13, %xmm12
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm12[0,0,1,1]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm25
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = <3,3,3,u,4,4,4,4>
+; AVX512BW-SLOW-NEXT:    vpermd %ymm25, %ymm12, %ymm17
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm26
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14]
+; AVX512BW-SLOW-NEXT:    movl $138547332, %eax # imm = 0x8421084
+; AVX512BW-SLOW-NEXT:    kmovd %eax, %k3
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm14, %ymm26, %ymm17 {%k3}
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm13, %zmm13
+; AVX512BW-SLOW-NEXT:    movabsq $-8330787646191410408, %rax # imm = 0x8C6318C6318C6318
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k2
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm6 {%k2}
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <3,3,3,3,u,4,4,4>
+; AVX512BW-SLOW-NEXT:    vpermd %ymm16, %ymm13, %ymm17
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} xmm18 = mem[1,1,2,2]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm18[0,1,1,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm17
+; AVX512BW-SLOW-NEXT:    movabsq $4760450083537948804, %rax # imm = 0x4210842108421084
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k6
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm17, %zmm6 {%k6}
+; AVX512BW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm17 = [19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128]
+; AVX512BW-SLOW-NEXT:    # ymm17 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm17, %ymm26, %ymm18
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm18[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm18 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm18, %ymm25, %ymm28
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm27, %ymm28, %ymm27
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm15, %ymm26, %ymm15
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm25 = ymm25[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm25 = ymm25[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm25, %ymm15 {%k5}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm27, %zmm15
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,128,21,128,21,20,128,22,128,24,128,22,23,128,25]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm23, %ymm26
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm26 = ymm26[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm27 = [128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128]
+; AVX512BW-SLOW-NEXT:    # ymm27 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm24, %ymm28
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm26, %ymm28, %ymm26
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm8, %ymm24, %ymm8
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm23 = ymm23[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm23 = ymm23[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm23, %ymm8 {%k1}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm26, %zmm8
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k4}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = [4,6,5,5,5,5,4,6,6,6,6,6,7,7,7,7]
+; AVX512BW-SLOW-NEXT:    vpermd %zmm16, %zmm15, %zmm15
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdx), %xmm16
+; AVX512BW-SLOW-NEXT:    movabsq $-8925843906633654008, %rax # imm = 0x8421084210842108
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rcx), %xmm15
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm20, %xmm15, %xmm20
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm22, %xmm16, %xmm22
+; AVX512BW-SLOW-NEXT:    vporq %xmm20, %xmm22, %xmm20
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm15[0],xmm16[0],xmm15[1],xmm16[1],xmm15[2],xmm16[2],xmm15[3],xmm16[3],xmm15[4],xmm16[4],xmm15[5],xmm16[5],xmm15[6],xmm16[6],xmm15[7],xmm16[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm7, %xmm15, %xmm7
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rsi), %xmm15
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm20, %zmm7, %zmm7
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdi), %xmm16
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm19, %xmm16, %xmm19
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm21, %xmm15, %xmm20
+; AVX512BW-SLOW-NEXT:    vporq %xmm19, %xmm20, %xmm19
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm16[0],xmm15[0],xmm16[1],xmm15[1],xmm16[2],xmm15[2],xmm16[3],xmm15[3],xmm16[4],xmm15[4],xmm16[5],xmm15[5],xmm16[6],xmm15[6],xmm16[7],xmm15[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm9, %xmm15, %xmm9
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm19, %zmm9, %zmm9
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[0,0,1,1,4,4,5,5]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[0,0,1,1,4,4,5,5]
+; AVX512BW-SLOW-NEXT:    movabsq $-4165393823095705204, %rax # imm = 0xC6318C6318C6318C
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm7, %zmm9 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [0,0,0,0,0,0,1,1,1,1,2,2,2,2,2,2]
+; AVX512BW-SLOW-NEXT:    vpermd %zmm3, %zmm7, %zmm3
+; AVX512BW-SLOW-NEXT:    movabsq $595056260442243600, %rax # imm = 0x842108421084210
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm9 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm1, %ymm3
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm7
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm11, %ymm0, %ymm0
+; AVX512BW-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
+; AVX512BW-SLOW-NEXT:    vpermd %ymm5, %ymm12, %ymm1
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm14, %ymm4, %ymm1 {%k3}
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm17, %ymm4, %ymm3
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm18, %ymm5, %ymm4
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,3,3]
+; AVX512BW-SLOW-NEXT:    vpor %ymm3, %ymm4, %ymm3
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k2}
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r8), %ymm0
+; AVX512BW-SLOW-NEXT:    vpermd %ymm0, %ymm13, %ymm3
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,2,1,1,4,6,5,5]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,3,2]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm0
+; AVX512BW-SLOW-NEXT:    movabsq $1190112520884487201, %rax # imm = 0x1084210842108421
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm1, 64(%r9)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm9, (%r9)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm8, 256(%r9)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm6, 192(%r9)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%r9)
+; AVX512BW-SLOW-NEXT:    vzeroupper
+; AVX512BW-SLOW-NEXT:    retq
 ;
 ; AVX512BW-FAST-LABEL: store_i8_stride5_vf64:
 ; AVX512BW-FAST:       # %bb.0:
@@ -4579,186 +4579,6 @@ define void @store_i8_stride5_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FAST-NEXT:    vmovdqa64 %zmm0, 192(%r9)
 ; AVX512BW-FAST-NEXT:    vzeroupper
 ; AVX512BW-FAST-NEXT:    retq
-;
-; AVX512DQBW-SLOW-LABEL: store_i8_stride5_vf64:
-; AVX512DQBW-SLOW:       # %bb.0:
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rcx), %ymm0
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm8 = [9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12,9,14,11,0,13,10,15,12]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm8, %ymm0, %ymm2
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm4 = ymm1[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm4 = ymm4[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    movl $693250386, %eax # imm = 0x29522952
-; AVX512DQBW-SLOW-NEXT:    kmovd %eax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm4, %ymm2 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm6
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm12
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm4 = xmm12[0],xmm6[0],xmm12[1],xmm6[1],xmm12[2],xmm6[2],xmm12[3],xmm6[3],xmm12[4],xmm6[4],xmm12[5],xmm6[5],xmm12[6],xmm6[6],xmm12[7],xmm6[7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm7 = <2,u,1,0,5,4,u,3,u,7,6,11,10,u,9,8>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm7, %xmm4, %xmm4
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[0,0,1,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm10
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rsi), %ymm4
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm15 = [11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14,11,0,13,10,15,12,0,14]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm15, %ymm4, %ymm2
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdi), %ymm5
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm9 = ymm5[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    movl $1251232404, %eax # imm = 0x4A944A94
-; AVX512DQBW-SLOW-NEXT:    kmovd %eax, %k5
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm9, %ymm2 {%k5}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm13
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm14
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm11 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,4,5,u,2,3,6,7,10,11,u,8,9,12,13>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm9, %xmm11, %xmm11
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[0,0,1,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm2, %zmm2
-; AVX512DQBW-SLOW-NEXT:    movabsq $1785168781326730801, %rax # imm = 0x18C6318C6318C631
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k4
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k4}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm16
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [6,6,6,6,7,7,7,7,16,16,16,16,16,16,17,17]
-; AVX512DQBW-SLOW-NEXT:    vpermi2d %zmm16, %zmm3, %zmm10
-; AVX512DQBW-SLOW-NEXT:    movabsq $2380225041768974402, %rax # imm = 0x2108421084210842
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm10, %zmm2 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm23
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm10 = [128,128,12,13,128,128,128,128,14,128,128,128,14,15,128,128,128,128,16,128,128,128,16,17,128,128,128,128,18,128,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm10, %ymm23, %ymm17
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm24
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [128,128,128,128,13,128,128,128,128,14,128,128,128,128,15,128,128,128,128,16,128,128,128,128,17,128,128,128,128,18,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm11, %ymm24, %ymm18
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm17, %ymm18, %ymm17
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm20 = <128,6,128,8,u,128,7,128,9,128,11,u,128,10,128,12>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm20, %xmm12, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm22 = <6,128,8,128,u,7,128,9,128,11,128,u,10,128,12,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm22, %xmm6, %xmm6
-; AVX512DQBW-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm6, %zmm6
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <8,128,u,7,128,9,128,u,128,u,10,128,12,128,u,11>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm19, %xmm14, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm21 = <128,8,u,128,7,128,9,u,11,u,128,10,128,12,u,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm21, %xmm13, %xmm13
-; AVX512DQBW-SLOW-NEXT:    vpor %xmm12, %xmm13, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm13 = ymm12[0,0,1,1]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm25
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm12 = <3,3,3,u,4,4,4,4>
-; AVX512DQBW-SLOW-NEXT:    vpermd %ymm25, %ymm12, %ymm17
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm26
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm14 = [0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14,0,0,13,2,15,0,1,14]
-; AVX512DQBW-SLOW-NEXT:    movl $138547332, %eax # imm = 0x8421084
-; AVX512DQBW-SLOW-NEXT:    kmovd %eax, %k3
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm14, %ymm26, %ymm17 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm13, %zmm13
-; AVX512DQBW-SLOW-NEXT:    movabsq $-8330787646191410408, %rax # imm = 0x8C6318C6318C6318
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm6 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm13 = <3,3,3,3,u,4,4,4>
-; AVX512DQBW-SLOW-NEXT:    vpermd %ymm16, %ymm13, %ymm17
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} xmm18 = mem[1,1,2,2]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm18 = ymm18[0,1,1,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm18, %zmm17
-; AVX512DQBW-SLOW-NEXT:    movabsq $4760450083537948804, %rax # imm = 0x4210842108421084
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k6
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm17, %zmm6 {%k6}
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm17 = [19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128]
-; AVX512DQBW-SLOW-NEXT:    # ymm17 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm17, %ymm26, %ymm18
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm18[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm18 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,21,128,128,20,128,22,128,24,128,128,23,128,25,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm18, %ymm25, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm27, %ymm28, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm15, %ymm26, %ymm15
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm25 = ymm25[0,1,2,3,6,5,6,7,8,9,10,11,14,13,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm25 = ymm25[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm25, %ymm15 {%k5}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm27, %zmm15
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,18,19,128,21,128,21,20,128,22,128,24,128,22,23,128,25]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm23, %ymm26
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm26 = ymm26[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm27 = [128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128,128,128,19,128,21,128,128,20,128,22,128,24,128,128,23,128]
-; AVX512DQBW-SLOW-NEXT:    # ymm27 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm24, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm26, %ymm28, %ymm26
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm8, %ymm24, %ymm8
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm23 = ymm23[0,1,2,3,5,6,7,6,8,9,10,11,13,14,15,14]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm23 = ymm23[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm23, %ymm8 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm26, %zmm8
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k4}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm15 = [4,6,5,5,5,5,4,6,6,6,6,6,7,7,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpermd %zmm16, %zmm15, %zmm15
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdx), %xmm16
-; AVX512DQBW-SLOW-NEXT:    movabsq $-8925843906633654008, %rax # imm = 0x8421084210842108
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm15, %zmm8 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rcx), %xmm15
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm20, %xmm15, %xmm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm22, %xmm16, %xmm22
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm20, %xmm22, %xmm20
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm15[0],xmm16[0],xmm15[1],xmm16[1],xmm15[2],xmm16[2],xmm15[3],xmm16[3],xmm15[4],xmm16[4],xmm15[5],xmm16[5],xmm15[6],xmm16[6],xmm15[7],xmm16[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm7, %xmm15, %xmm7
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rsi), %xmm15
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm20, %zmm7, %zmm7
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdi), %xmm16
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm19, %xmm16, %xmm19
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm21, %xmm15, %xmm20
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm19, %xmm20, %xmm19
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm16[0],xmm15[0],xmm16[1],xmm15[1],xmm16[2],xmm15[2],xmm16[3],xmm15[3],xmm16[4],xmm15[4],xmm16[5],xmm15[5],xmm16[6],xmm15[6],xmm16[7],xmm15[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm9, %xmm15, %xmm9
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm19, %zmm9, %zmm9
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[0,0,1,1,4,4,5,5]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[0,0,1,1,4,4,5,5]
-; AVX512DQBW-SLOW-NEXT:    movabsq $-4165393823095705204, %rax # imm = 0xC6318C6318C6318C
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm7, %zmm9 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [0,0,0,0,0,0,1,1,1,1,2,2,2,2,2,2]
-; AVX512DQBW-SLOW-NEXT:    vpermd %zmm3, %zmm7, %zmm3
-; AVX512DQBW-SLOW-NEXT:    movabsq $595056260442243600, %rax # imm = 0x842108421084210
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm9 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm1, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm7
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm10, %ymm1, %ymm1
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm11, %ymm0, %ymm0
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm1, %ymm0, %ymm0
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vpermd %ymm5, %ymm12, %ymm1
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm14, %ymm4, %ymm1 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm17, %ymm4, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm18, %ymm5, %ymm4
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,2,3,3]
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm3, %ymm4, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm1, %zmm1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r8), %ymm0
-; AVX512DQBW-SLOW-NEXT:    vpermd %ymm0, %ymm13, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm0 = ymm0[0,2,1,1,4,6,5,5]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,3,2]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm0
-; AVX512DQBW-SLOW-NEXT:    movabsq $1190112520884487201, %rax # imm = 0x1084210842108421
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm1 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm1, 64(%r9)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm9, (%r9)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm8, 256(%r9)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm6, 192(%r9)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm2, 128(%r9)
-; AVX512DQBW-SLOW-NEXT:    vzeroupper
-; AVX512DQBW-SLOW-NEXT:    retq
   %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
   %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
   %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
@@ -4781,11 +4601,13 @@ define void @store_i8_stride5_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
+; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
 ; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
 ; AVX512DQBW-ONLY: {{.*}}
+; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-6.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-6.ll
index 493728470f30169..3bc7b6e958223d1 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-6.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-6.ll
@@ -4671,215 +4671,215 @@ define void @store_i8_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512F-FAST-NEXT:    vzeroupper
 ; AVX512F-FAST-NEXT:    retq
 ;
-; AVX512BW-ONLY-SLOW-LABEL: store_i8_stride6_vf64:
-; AVX512BW-ONLY-SLOW:       # %bb.0:
-; AVX512BW-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r8), %zmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r9), %zmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm16
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm0, %zmm7, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm4
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm10
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm5, %ymm20, %ymm5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm18
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm19
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vprold $16, %ymm6, %ymm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm5
-; AVX512BW-ONLY-SLOW-NEXT:    movl $613566756, %r10d # imm = 0x24924924
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %r10d, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm5, %zmm0 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%r8), %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm6, %ymm23, %ymm6
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r8), %ymm21
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm24 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm24, %ymm21, %ymm15
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm6, %zmm6
-; AVX512BW-ONLY-SLOW-NEXT:    movl $1227133513, %r10d # imm = 0x49249249
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %r10d, %k2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm6, %zmm0 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm22, %ymm23, %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r9), %ymm22
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm22, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm27, %zmm25, %zmm25
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm25, %zmm0 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm29, %zmm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm28, %zmm7, %zmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm28, %ymm20, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm29
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vprold $16, %ymm30, %ymm30
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm30, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm30
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm24, %ymm30, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm24, %ymm23
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm7 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm26 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm25, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm27, %ymm23
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm23[0],ymm20[0],ymm23[1],ymm20[1],ymm23[2],ymm20[2],ymm23[3],ymm20[3],ymm23[4],ymm20[4],ymm23[5],ymm20[5],ymm23[6],ymm20[6],ymm23[7],ymm20[7],ymm23[16],ymm20[16],ymm23[17],ymm20[17],ymm23[18],ymm20[18],ymm23[19],ymm20[19],ymm23[20],ymm20[20],ymm23[21],ymm20[21],ymm23[22],ymm20[22],ymm23[23],ymm20[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm23 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm23, %ymm25, %ymm23
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm23 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm28, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm29, %ymm31
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm28, %ymm29, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm27 = zmm30[0,1,2,3],zmm14[4,5,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm28, %zmm27, %zmm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm27 = zmm27[2,2,2,3,6,6,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $-1840700270, %ecx # imm = 0x92492492
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %ecx, %k2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,2,3],zmm12[4,5,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm27, %zmm24, %zmm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,2,2,3,6,6,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rcx, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm24, %zmm20 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm16, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm17, %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm24 = ymm26[0],ymm24[0],ymm26[1],ymm24[1],ymm26[2],ymm24[2],ymm26[3],ymm24[3],ymm26[4],ymm24[4],ymm26[5],ymm24[5],ymm26[6],ymm24[6],ymm26[7],ymm24[7],ymm26[16],ymm24[16],ymm26[17],ymm24[17],ymm26[18],ymm24[18],ymm26[19],ymm24[19],ymm26[20],ymm24[20],ymm26[21],ymm24[21],ymm26[22],ymm24[22],ymm26[23],ymm24[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm16, %ymm25, %ymm16
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm17
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm18, %ymm16
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm19, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm16 = ymm24[0],ymm16[0],ymm24[1],ymm16[1],ymm24[2],ymm16[2],ymm24[3],ymm16[3],ymm24[4],ymm16[4],ymm24[5],ymm16[5],ymm24[6],ymm16[6],ymm24[7],ymm16[7],ymm24[16],ymm16[16],ymm24[17],ymm16[17],ymm24[18],ymm16[18],ymm24[19],ymm16[19],ymm24[20],ymm16[20],ymm24[21],ymm16[21],ymm24[22],ymm16[22],ymm24[23],ymm16[23]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm18, %ymm29, %ymm18
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm18, %zmm16, %zmm16
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm17, %zmm16 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm21, %zmm14, %zmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm28, %zmm14, %zmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm14, %zmm16 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm12, %zmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm27, %zmm12, %zmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm12, %zmm16 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm9, %xmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm11, %xmm17
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm9, %ymm11, %ymm9
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm9, %zmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm23, %xmm8, %xmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm23, %xmm10, %xmm17
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3],xmm17[4],xmm14[4],xmm17[5],xmm14[5],xmm17[6],xmm14[6],xmm17[7],xmm14[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vprold $16, %xmm8, %xmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm8, %zmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm8, %zmm9 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm8, %zmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm9 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm8, %zmm13, %zmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm8
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $585610922974906400, %rcx # imm = 0x820820820820820
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rcx, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm8, %zmm9 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm1, %xmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm3, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm1, %ymm11, %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm1, %zmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm23, %xmm2, %xmm3
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm23, %xmm4, %xmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vprold $16, %xmm2, %xmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu16 %zmm2, %zmm1 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm3, %zmm2, %zmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm1 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm2, %zmm3, %zmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm2, %zmm1 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm1, (%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm9, 192(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm16, 128(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm20, 320(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm7, 256(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512BW-ONLY-SLOW-NEXT:    retq
+; AVX512BW-SLOW-LABEL: store_i8_stride6_vf64:
+; AVX512BW-SLOW:       # %bb.0:
+; AVX512BW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm14
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r9), %zmm12
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm16
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm17
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rsi), %xmm1
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm11
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
+; AVX512BW-SLOW-NEXT:    vpermw %zmm0, %zmm7, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rcx), %xmm2
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm8
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdx), %xmm4
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm10
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm5, %ymm20, %ymm5
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm18
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm19
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
+; AVX512BW-SLOW-NEXT:    vprold $16, %ymm6, %ymm6
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm5
+; AVX512BW-SLOW-NEXT:    movl $613566756, %r10d # imm = 0x24924924
+; AVX512BW-SLOW-NEXT:    kmovd %r10d, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm5, %zmm0 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r8), %xmm5
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%r8), %xmm13
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm6, %ymm23, %ymm6
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r8), %ymm21
+; AVX512BW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
+; AVX512BW-SLOW-NEXT:    # ymm24 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm24, %ymm21, %ymm15
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm6, %zmm6
+; AVX512BW-SLOW-NEXT:    movl $1227133513, %r10d # imm = 0x49249249
+; AVX512BW-SLOW-NEXT:    kmovd %r10d, %k2
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm6, %zmm0 {%k2}
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r9), %xmm6
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%r9), %xmm15
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm22, %ymm23, %ymm25
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r9), %ymm22
+; AVX512BW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
+; AVX512BW-SLOW-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm22, %ymm27
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm27, %zmm25, %zmm25
+; AVX512BW-SLOW-NEXT:    movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm25, %zmm0 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm25
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm27
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm29, %zmm28
+; AVX512BW-SLOW-NEXT:    vpermw %zmm28, %zmm7, %zmm7
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm28, %ymm20, %ymm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm28
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm29
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
+; AVX512BW-SLOW-NEXT:    vprold $16, %ymm30, %ymm30
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm30, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k1}
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm30
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm24, %ymm30, %ymm24
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k2}
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm24
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm24, %ymm23
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm7 {%k3}
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm26 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm25, %ymm20
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm27, %ymm23
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm23[0],ymm20[0],ymm23[1],ymm20[1],ymm23[2],ymm20[2],ymm23[3],ymm20[3],ymm23[4],ymm20[4],ymm23[5],ymm20[5],ymm23[6],ymm20[6],ymm23[7],ymm20[7],ymm23[16],ymm20[16],ymm23[17],ymm20[17],ymm23[18],ymm20[18],ymm23[19],ymm20[19],ymm23[20],ymm20[20],ymm23[21],ymm20[21],ymm23[22],ymm20[22],ymm23[23],ymm20[23]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm23 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm23, %ymm25, %ymm23
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm27
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm23 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm28, %ymm20
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm29, %ymm31
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm28, %ymm29, %ymm28
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k1}
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm27 = zmm30[0,1,2,3],zmm14[4,5,6,7]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm28, %zmm27, %zmm27
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm27 = zmm27[2,2,2,3,6,6,6,7]
+; AVX512BW-SLOW-NEXT:    movl $-1840700270, %ecx # imm = 0x92492492
+; AVX512BW-SLOW-NEXT:    kmovd %ecx, %k2
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k2}
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,2,3],zmm12[4,5,6,7]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm27, %zmm24, %zmm24
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,2,2,3,6,6,6,7]
+; AVX512BW-SLOW-NEXT:    movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
+; AVX512BW-SLOW-NEXT:    kmovq %rcx, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm24, %zmm20 {%k3}
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm16, %ymm24
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm17, %ymm26
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm24 = ymm26[0],ymm24[0],ymm26[1],ymm24[1],ymm26[2],ymm24[2],ymm26[3],ymm24[3],ymm26[4],ymm24[4],ymm26[5],ymm24[5],ymm26[6],ymm24[6],ymm26[7],ymm24[7],ymm26[16],ymm24[16],ymm26[17],ymm24[17],ymm26[18],ymm24[18],ymm26[19],ymm24[19],ymm26[20],ymm24[20],ymm26[21],ymm24[21],ymm26[22],ymm24[22],ymm26[23],ymm24[23]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm16, %ymm25, %ymm16
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm17
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm18, %ymm16
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm19, %ymm24
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm16 = ymm24[0],ymm16[0],ymm24[1],ymm16[1],ymm24[2],ymm16[2],ymm24[3],ymm16[3],ymm24[4],ymm16[4],ymm24[5],ymm16[5],ymm24[6],ymm16[6],ymm24[7],ymm16[7],ymm24[16],ymm16[16],ymm24[17],ymm16[17],ymm24[18],ymm16[18],ymm24[19],ymm16[19],ymm24[20],ymm16[20],ymm24[21],ymm16[21],ymm24[22],ymm16[22],ymm24[23],ymm16[23]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm18, %ymm29, %ymm18
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm18, %zmm16, %zmm16
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm17, %zmm16 {%k1}
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm21, %zmm14, %zmm14
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm28, %zmm14, %zmm14
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm14, %zmm16 {%k2}
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm12, %zmm12
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm27, %zmm12, %zmm12
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm12, %zmm16 {%k3}
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm9, %xmm14
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm11, %xmm17
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm9, %ymm11, %ymm9
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm9, %zmm9
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm23, %xmm8, %xmm14
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm23, %xmm10, %xmm17
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3],xmm17[4],xmm14[4],xmm17[5],xmm14[5],xmm17[6],xmm14[6],xmm17[7],xmm14[7]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
+; AVX512BW-SLOW-NEXT:    vprold $16, %xmm8, %xmm8
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm8, %zmm8
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm8, %zmm9 {%k2}
+; AVX512BW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm8, %zmm8
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
+; AVX512BW-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm9 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm8, %zmm13, %zmm8
+; AVX512BW-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm8
+; AVX512BW-SLOW-NEXT:    movabsq $585610922974906400, %rcx # imm = 0x820820820820820
+; AVX512BW-SLOW-NEXT:    kmovq %rcx, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm8, %zmm9 {%k3}
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm1, %xmm8
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm3, %xmm12
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm1, %ymm11, %ymm1
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm1, %zmm1
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm23, %xmm2, %xmm3
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm23, %xmm4, %xmm8
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
+; AVX512BW-SLOW-NEXT:    vprold $16, %xmm2, %xmm2
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
+; AVX512BW-SLOW-NEXT:    vmovdqu16 %zmm2, %zmm1 {%k2}
+; AVX512BW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm3, %zmm2, %zmm2
+; AVX512BW-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm1 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm2, %zmm3, %zmm2
+; AVX512BW-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm2
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm2, %zmm1 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm1, (%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm9, 192(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm16, 128(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm20, 320(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm7, 256(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
+; AVX512BW-SLOW-NEXT:    vzeroupper
+; AVX512BW-SLOW-NEXT:    retq
 ;
 ; AVX512BW-FAST-LABEL: store_i8_stride6_vf64:
 ; AVX512BW-FAST:       # %bb.0:
@@ -5083,216 +5083,6 @@ define void @store_i8_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FAST-NEXT:    vmovdqa64 %zmm0, 320(%rax)
 ; AVX512BW-FAST-NEXT:    vzeroupper
 ; AVX512BW-FAST-NEXT:    retq
-;
-; AVX512DQBW-SLOW-LABEL: store_i8_stride6_vf64:
-; AVX512DQBW-SLOW:       # %bb.0:
-; AVX512DQBW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm14
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r9), %zmm12
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm16
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm17
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm0 = ymm17[0],ymm16[0],ymm17[1],ymm16[1],ymm17[2],ymm16[2],ymm17[3],ymm16[3],ymm17[4],ymm16[4],ymm17[5],ymm16[5],ymm17[6],ymm16[6],ymm17[7],ymm16[7],ymm17[16],ymm16[16],ymm17[17],ymm16[17],ymm17[18],ymm16[18],ymm17[19],ymm16[19],ymm17[20],ymm16[20],ymm17[21],ymm16[21],ymm17[22],ymm16[22],ymm17[23],ymm16[23]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rsi), %xmm1
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm9
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdi), %xmm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rdi), %xmm11
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm3[8],xmm1[8],xmm3[9],xmm1[9],xmm3[10],xmm1[10],xmm3[11],xmm1[11],xmm3[12],xmm1[12],xmm3[13],xmm1[13],xmm3[14],xmm1[14],xmm3[15],xmm1[15]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm7 = [4,3,6,5,4,3,6,5,4,3,6,5,7,7,7,7,24,27,26,25,24,27,26,25,24,27,26,25,28,29,30,29]
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm0, %zmm7, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rcx), %xmm2
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rcx), %xmm8
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdx), %xmm4
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rdx), %xmm10
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm5 = xmm4[8],xmm2[8],xmm4[9],xmm2[9],xmm4[10],xmm2[10],xmm4[11],xmm2[11],xmm4[12],xmm2[12],xmm4[13],xmm2[13],xmm4[14],xmm2[14],xmm4[15],xmm2[15]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm20 = [5,4,3,6,5,4,3,6,5,4,3,6,7,7,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm5, %ymm20, %ymm5
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm18
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm19
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm6 = ymm19[0],ymm18[0],ymm19[1],ymm18[1],ymm19[2],ymm18[2],ymm19[3],ymm18[3],ymm19[4],ymm18[4],ymm19[5],ymm18[5],ymm19[6],ymm18[6],ymm19[7],ymm18[7],ymm19[16],ymm18[16],ymm19[17],ymm18[17],ymm19[18],ymm18[18],ymm19[19],ymm18[19],ymm19[20],ymm18[20],ymm19[21],ymm18[21],ymm19[22],ymm18[22],ymm19[23],ymm18[23]
-; AVX512DQBW-SLOW-NEXT:    vprold $16, %ymm6, %ymm6
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm6, %zmm5, %zmm5
-; AVX512DQBW-SLOW-NEXT:    movl $613566756, %r10d # imm = 0x24924924
-; AVX512DQBW-SLOW-NEXT:    kmovd %r10d, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm5, %zmm0 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r8), %xmm5
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%r8), %xmm13
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm5[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [2,5,4,3,2,5,4,3,2,5,4,3,6,5,6,7]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm6, %ymm23, %ymm6
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r8), %ymm21
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm24 = [2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0]
-; AVX512DQBW-SLOW-NEXT:    # ymm24 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm24, %ymm21, %ymm15
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm6, %zmm6
-; AVX512DQBW-SLOW-NEXT:    movl $1227133513, %r10d # imm = 0x49249249
-; AVX512DQBW-SLOW-NEXT:    kmovd %r10d, %k2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm6, %zmm0 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r9), %xmm6
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm6[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm22, %ymm23, %ymm25
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r9), %ymm22
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm26 = [0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0,0,2,0,1,0,0,0,3,0,0,0,0,0,4,0,0]
-; AVX512DQBW-SLOW-NEXT:    # ymm26 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm22, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm27, %zmm25, %zmm25
-; AVX512DQBW-SLOW-NEXT:    movabsq $2342443691899625602, %r10 # imm = 0x2082082082082082
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm25, %zmm0 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm25
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm27
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm28 = ymm27[0],ymm25[0],ymm27[1],ymm25[1],ymm27[2],ymm25[2],ymm27[3],ymm25[3],ymm27[4],ymm25[4],ymm27[5],ymm25[5],ymm27[6],ymm25[6],ymm27[7],ymm25[7],ymm27[16],ymm25[16],ymm27[17],ymm25[17],ymm27[18],ymm25[18],ymm27[19],ymm25[19],ymm27[20],ymm25[20],ymm27[21],ymm25[21],ymm27[22],ymm25[22],ymm27[23],ymm25[23]
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm29 = xmm11[8],xmm9[8],xmm11[9],xmm9[9],xmm11[10],xmm9[10],xmm11[11],xmm9[11],xmm11[12],xmm9[12],xmm11[13],xmm9[13],xmm11[14],xmm9[14],xmm11[15],xmm9[15]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm29, %zmm28
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm28, %zmm7, %zmm7
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm28 = xmm10[8],xmm8[8],xmm10[9],xmm8[9],xmm10[10],xmm8[10],xmm10[11],xmm8[11],xmm10[12],xmm8[12],xmm10[13],xmm8[13],xmm10[14],xmm8[14],xmm10[15],xmm8[15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm28, %ymm20, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rcx), %ymm28
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm29
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm30 = ymm29[0],ymm28[0],ymm29[1],ymm28[1],ymm29[2],ymm28[2],ymm29[3],ymm28[3],ymm29[4],ymm28[4],ymm29[5],ymm28[5],ymm29[6],ymm28[6],ymm29[7],ymm28[7],ymm29[16],ymm28[16],ymm29[17],ymm28[17],ymm29[18],ymm28[18],ymm29[19],ymm28[19],ymm29[20],ymm28[20],ymm29[21],ymm28[21],ymm29[22],ymm28[22],ymm29[23],ymm28[23]
-; AVX512DQBW-SLOW-NEXT:    vprold $16, %ymm30, %ymm30
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm30, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm13[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm30
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm24, %ymm30, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm20, %zmm7 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm20 = xmm15[8,8,9,9,10,10,11,11,12,12,13,13,14,14,15,15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm20, %ymm23, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm24, %ymm23
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm23 = ymm23[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm7 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm26 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm25, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm27, %ymm23
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm23[0],ymm20[0],ymm23[1],ymm20[1],ymm23[2],ymm20[2],ymm23[3],ymm20[3],ymm23[4],ymm20[4],ymm23[5],ymm20[5],ymm23[6],ymm20[6],ymm23[7],ymm20[7],ymm23[16],ymm20[16],ymm23[17],ymm20[17],ymm23[18],ymm20[18],ymm23[19],ymm20[19],ymm23[20],ymm20[20],ymm23[21],ymm20[21],ymm23[22],ymm20[22],ymm23[23],ymm20[23]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm23 = ymm27[8],ymm25[8],ymm27[9],ymm25[9],ymm27[10],ymm25[10],ymm27[11],ymm25[11],ymm27[12],ymm25[12],ymm27[13],ymm25[13],ymm27[14],ymm25[14],ymm27[15],ymm25[15],ymm27[24],ymm25[24],ymm27[25],ymm25[25],ymm27[26],ymm25[26],ymm27[27],ymm25[27],ymm27[28],ymm25[28],ymm27[29],ymm25[29],ymm27[30],ymm25[30],ymm27[31],ymm25[31]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [12,11,14,13,12,11,14,13,12,11,14,13,15,15,15,15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm23, %ymm25, %ymm23
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm23, %zmm20, %zmm27
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm23 = [5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10,5,8,7,6,9,0,0,10]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm28, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm29, %ymm31
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm20 = ymm31[0],ymm20[0],ymm31[1],ymm20[1],ymm31[2],ymm20[2],ymm31[3],ymm20[3],ymm31[4],ymm20[4],ymm31[5],ymm20[5],ymm31[6],ymm20[6],ymm31[7],ymm20[7],ymm31[16],ymm20[16],ymm31[17],ymm20[17],ymm31[18],ymm20[18],ymm31[19],ymm20[19],ymm31[20],ymm20[20],ymm31[21],ymm20[21],ymm31[22],ymm20[22],ymm31[23],ymm20[23]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm28 = ymm29[8],ymm28[8],ymm29[9],ymm28[9],ymm29[10],ymm28[10],ymm29[11],ymm28[11],ymm29[12],ymm28[12],ymm29[13],ymm28[13],ymm29[14],ymm28[14],ymm29[15],ymm28[15],ymm29[24],ymm28[24],ymm29[25],ymm28[25],ymm29[26],ymm28[26],ymm29[27],ymm28[27],ymm29[28],ymm28[28],ymm29[29],ymm28[29],ymm29[30],ymm28[30],ymm29[31],ymm28[31]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [13,12,11,14,13,12,11,14,13,12,11,14,15,15,15,15]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm28, %ymm29, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm27 = zmm30[0,1,2,3],zmm14[4,5,6,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm28 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15,u>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm28, %zmm27, %zmm27
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm27 = zmm27[2,2,2,3,6,6,6,7]
-; AVX512DQBW-SLOW-NEXT:    movl $-1840700270, %ecx # imm = 0x92492492
-; AVX512DQBW-SLOW-NEXT:    kmovd %ecx, %k2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm27, %zmm20 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm24 = zmm24[0,1,2,3],zmm12[4,5,6,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm27 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,6,u,5,u,8,u,7,u,u,u,9,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,10,u,13,u,12,u,11,u,14,u,u,u,u,u,15>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm27, %zmm24, %zmm24
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,2,2,3,6,6,6,7]
-; AVX512DQBW-SLOW-NEXT:    movabsq $-9076969306111049208, %rcx # imm = 0x8208208208208208
-; AVX512DQBW-SLOW-NEXT:    kmovq %rcx, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm24, %zmm20 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm16, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm17, %ymm26
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm24 = ymm26[0],ymm24[0],ymm26[1],ymm24[1],ymm26[2],ymm24[2],ymm26[3],ymm24[3],ymm26[4],ymm24[4],ymm26[5],ymm24[5],ymm26[6],ymm24[6],ymm26[7],ymm24[7],ymm26[16],ymm24[16],ymm26[17],ymm24[17],ymm26[18],ymm24[18],ymm26[19],ymm24[19],ymm26[20],ymm24[20],ymm26[21],ymm24[21],ymm26[22],ymm24[22],ymm26[23],ymm24[23]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm16 = ymm17[8],ymm16[8],ymm17[9],ymm16[9],ymm17[10],ymm16[10],ymm17[11],ymm16[11],ymm17[12],ymm16[12],ymm17[13],ymm16[13],ymm17[14],ymm16[14],ymm17[15],ymm16[15],ymm17[24],ymm16[24],ymm17[25],ymm16[25],ymm17[26],ymm16[26],ymm17[27],ymm16[27],ymm17[28],ymm16[28],ymm17[29],ymm16[29],ymm17[30],ymm16[30],ymm17[31],ymm16[31]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm16, %ymm25, %ymm16
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm17
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm18, %ymm16
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm19, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} ymm16 = ymm24[0],ymm16[0],ymm24[1],ymm16[1],ymm24[2],ymm16[2],ymm24[3],ymm16[3],ymm24[4],ymm16[4],ymm24[5],ymm16[5],ymm24[6],ymm16[6],ymm24[7],ymm16[7],ymm24[16],ymm16[16],ymm24[17],ymm16[17],ymm24[18],ymm16[18],ymm24[19],ymm16[19],ymm24[20],ymm16[20],ymm24[21],ymm16[21],ymm24[22],ymm16[22],ymm24[23],ymm16[23]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm16 = ymm16[2,2,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} ymm18 = ymm19[8],ymm18[8],ymm19[9],ymm18[9],ymm19[10],ymm18[10],ymm19[11],ymm18[11],ymm19[12],ymm18[12],ymm19[13],ymm18[13],ymm19[14],ymm18[14],ymm19[15],ymm18[15],ymm19[24],ymm18[24],ymm19[25],ymm18[25],ymm19[26],ymm18[26],ymm19[27],ymm18[27],ymm19[28],ymm18[28],ymm19[29],ymm18[29],ymm19[30],ymm18[30],ymm19[31],ymm18[31]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm18, %ymm29, %ymm18
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm18, %zmm16, %zmm16
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm17, %zmm16 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm21, %zmm14, %zmm14
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm28, %zmm14, %zmm14
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm14 = zmm14[2,2,2,3,6,6,6,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm14, %zmm16 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm22, %zmm12, %zmm12
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm27, %zmm12, %zmm12
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm12 = zmm12[2,2,2,3,6,6,6,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm12, %zmm16 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} xmm12 = [8,7,6,9,0,0,10,0,8,7,6,9,0,0,10,0]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm9, %xmm14
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm11, %xmm17
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm14 = xmm17[8],xmm14[8],xmm17[9],xmm14[9],xmm17[10],xmm14[10],xmm17[11],xmm14[11],xmm17[12],xmm14[12],xmm17[13],xmm14[13],xmm17[14],xmm14[14],xmm17[15],xmm14[15]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[0,0,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm11[0],xmm9[0],xmm11[1],xmm9[1],xmm11[2],xmm9[2],xmm11[3],xmm9[3],xmm11[4],xmm9[4],xmm11[5],xmm9[5],xmm11[6],xmm9[6],xmm11[7],xmm9[7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm11 = [0,3,2,1,0,3,2,1,0,3,2,1,4,5,6,5]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm9, %ymm11, %ymm9
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm9, %zmm9
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm23, %xmm8, %xmm14
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm23, %xmm10, %xmm17
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm17[0],xmm14[0],xmm17[1],xmm14[1],xmm17[2],xmm14[2],xmm17[3],xmm14[3],xmm17[4],xmm14[4],xmm17[5],xmm14[5],xmm17[6],xmm14[6],xmm17[7],xmm14[7]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm10[0],xmm8[0],xmm10[1],xmm8[1],xmm10[2],xmm8[2],xmm10[3],xmm8[3],xmm10[4],xmm8[4],xmm10[5],xmm8[5],xmm10[6],xmm8[6],xmm10[7],xmm8[7]
-; AVX512DQBW-SLOW-NEXT:    vprold $16, %xmm8, %xmm8
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm8, %zmm8
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[0,0,0,1,4,4,4,5]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm8, %zmm9 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm8 = xmm13[0],zero,xmm13[1],zero,xmm13[2],zero,xmm13[3],zero,xmm13[4],zero,xmm13[5],zero,xmm13[6],zero,xmm13[7],zero
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} xmm10 = xmm13[2,1,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm10 = xmm10[0],zero,xmm10[1],zero,xmm10[2],zero,xmm10[3],zero,xmm10[4],zero,xmm10[5],zero,xmm10[6],zero,xmm10[7],zero
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm8, %zmm8
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm10 = [2,1,0,3,2,1,0,3,2,1,0,3,4,4,4,4,22,21,16,23,22,21,16,23,22,21,16,23,17,17,17,17]
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm9 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} xmm8 = xmm15[2,1,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm8 = xmm8[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm13 = xmm15[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm8, %zmm13, %zmm8
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm8, %zmm10, %zmm8
-; AVX512DQBW-SLOW-NEXT:    movabsq $585610922974906400, %rcx # imm = 0x820820820820820
-; AVX512DQBW-SLOW-NEXT:    kmovq %rcx, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm8, %zmm9 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm1, %xmm8
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm3, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm8 = xmm12[8],xmm8[8],xmm12[9],xmm8[9],xmm12[10],xmm8[10],xmm12[11],xmm8[11],xmm12[12],xmm8[12],xmm12[13],xmm8[13],xmm12[14],xmm8[14],xmm12[15],xmm8[15]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[0,0,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm3[0],xmm1[0],xmm3[1],xmm1[1],xmm3[2],xmm1[2],xmm3[3],xmm1[3],xmm3[4],xmm1[4],xmm3[5],xmm1[5],xmm3[6],xmm1[6],xmm3[7],xmm1[7]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm1, %ymm11, %ymm1
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm1, %zmm1
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm23, %xmm2, %xmm3
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm23, %xmm4, %xmm8
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm8[0],xmm3[0],xmm8[1],xmm3[1],xmm8[2],xmm3[2],xmm8[3],xmm3[3],xmm8[4],xmm3[4],xmm8[5],xmm3[5],xmm8[6],xmm3[6],xmm8[7],xmm3[7]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm4[0],xmm2[0],xmm4[1],xmm2[1],xmm4[2],xmm2[2],xmm4[3],xmm2[3],xmm4[4],xmm2[4],xmm4[5],xmm2[5],xmm4[6],xmm2[6],xmm4[7],xmm2[7]
-; AVX512DQBW-SLOW-NEXT:    vprold $16, %xmm2, %xmm2
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm2, %zmm2
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm2 = zmm2[0,0,0,1,4,4,4,5]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu16 %zmm2, %zmm1 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm2 = xmm5[0],zero,xmm5[1],zero,xmm5[2],zero,xmm5[3],zero,xmm5[4],zero,xmm5[5],zero,xmm5[6],zero,xmm5[7],zero
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} xmm3 = xmm5[2,1,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpmovzxbw {{.*#+}} xmm3 = xmm3[0],zero,xmm3[1],zero,xmm3[2],zero,xmm3[3],zero,xmm3[4],zero,xmm3[5],zero,xmm3[6],zero,xmm3[7],zero
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm3, %zmm2, %zmm2
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm1 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} xmm2 = xmm6[2,1,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm2 = xmm2[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm3 = xmm6[0,0,1,1,2,2,3,3,4,4,5,5,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm2, %zmm3, %zmm2
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm2, %zmm10, %zmm2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm2, %zmm1 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm1, (%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm9, 192(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm16, 128(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm20, 320(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm7, 256(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm0, 64(%rax)
-; AVX512DQBW-SLOW-NEXT:    vzeroupper
-; AVX512DQBW-SLOW-NEXT:    retq
   %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
   %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
   %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
@@ -5317,11 +5107,13 @@ define void @store_i8_stride6_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
+; AVX512BW-ONLY-SLOW: {{.*}}
 ; AVX512DQ-FAST: {{.*}}
 ; AVX512DQ-ONLY: {{.*}}
 ; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
 ; AVX512DQBW-ONLY: {{.*}}
+; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-ONLY: {{.*}}
 ; AVX512F-ONLY-FAST: {{.*}}
 ; AVX512F-ONLY-SLOW: {{.*}}

diff --git a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
index bb74d0f8fe7ee14..54ed0f184827a51 100644
--- a/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
+++ b/llvm/test/CodeGen/X86/vector-interleaved-store-i8-stride-7.ll
@@ -3318,160 +3318,160 @@ define void @store_i8_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX2-FAST-PERLANE-NEXT:    vzeroupper
 ; AVX2-FAST-PERLANE-NEXT:    retq
 ;
-; AVX512F-ONLY-SLOW-LABEL: store_i8_stride7_vf32:
-; AVX512F-ONLY-SLOW:       # %bb.0:
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %ymm11
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm5
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %ymm6
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r10), %ymm4
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,u,u,u,u,26,27,24,25]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
-; AVX512F-ONLY-SLOW-NEXT:    vpermi2d %zmm7, %zmm8, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm7, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm5[18],zero,ymm5[20,21,20,21],zero,ymm5[19],zero,ymm5[19,20,21,22],zero
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm5[23],zero,ymm5[23,24,25,26],zero,ymm5[24],zero,ymm5[30,31]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %zmm7, %zmm8, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
-; AVX512F-ONLY-SLOW-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpandq %ymm16, %ymm8, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm11[18,19,20,21],zero,ymm11[19],zero,ymm11[25,26,27,22],zero,ymm11[20],zero
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm9, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %zmm9, %zmm8, %zmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,1,1,4,4,5,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm17 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
-; AVX512F-ONLY-SLOW-NEXT:    # ymm17 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpandq %ymm17, %ymm8, %ymm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm11[23],zero,ymm11[21,22,23,26],zero,ymm11[24],zero,ymm11[28,29,26,27]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm11, %ymm20
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vporq %zmm10, %zmm8, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm8
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm11
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm11[u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u,u],zero
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm12
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm12[u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u,u,9]
-; AVX512F-ONLY-SLOW-NEXT:    vpor %xmm7, %xmm9, %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm9[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm7, %zmm9, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm7[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm13 = xmm7[u,u,u],zero,xmm7[7],zero,xmm7[5,u,u,u],zero,xmm7[8],zero,xmm7[6,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm9
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpor %xmm13, %xmm14, %xmm13
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm14, %zmm13
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm18 = zmm13[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm18
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm13
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm14
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
-; AVX512F-ONLY-SLOW-NEXT:    vpor %xmm10, %xmm15, %xmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm15[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm15, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm19 = zmm10[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa (%r10), %xmm15
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm15[1,1,0,0,4,5,6,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,0]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm0[0,0,1,0,4,4,5,4]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm10
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14,u,u],zero,zero,zero,zero,ymm1[15,u,u],zero,zero,zero,zero,ymm1[16,u,u],zero,zero,zero,zero,ymm1[17,u,u],zero,zero,zero,zero,ymm1[18]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,14],zero,ymm2[u,u,u,u,u,15],zero,ymm2[u,u,u,u,u,16],zero,ymm2[u,u,u,u,u,17],zero,ymm2[u,u,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm1, %ymm7, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm7, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,ymm5[u,u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[13,u,u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm7, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm7, %zmm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
-; AVX512F-ONLY-SLOW-NEXT:    vpandn %ymm7, %ymm9, %ymm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = zero,ymm4[13,u,u,u,u],zero,zero,ymm4[14,u,u,u,u],zero,zero,ymm4[15,u,u,u,u],zero,zero,ymm4[16,u,u,u,u],zero,zero,ymm4[17,u,u]
-; AVX512F-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, %ymm16, %ymm0, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm0
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm1, %ymm2
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm1
-; AVX512F-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512F-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,3,3,6,6,7,7]
-; AVX512F-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm3
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa %ymm3, 192(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm8, 128(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512F-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512F-ONLY-SLOW-NEXT:    retq
+; AVX512F-SLOW-LABEL: store_i8_stride7_vf32:
+; AVX512F-SLOW:       # %bb.0:
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %r10
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %ymm11
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %ymm5
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %ymm6
+; AVX512F-SLOW-NEXT:    vmovdqa (%r10), %ymm4
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,u,u,u,u,26,27,24,25]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
+; AVX512F-SLOW-NEXT:    vpermi2d %zmm7, %zmm8, %zmm9
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm7, %zmm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[2,3,2,3,6,7,6,7]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm5[18],zero,ymm5[20,21,20,21],zero,ymm5[19],zero,ymm5[19,20,21,22],zero
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm5[23],zero,ymm5[23,24,25,26],zero,ymm5[24],zero,ymm5[30,31]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[2,3,2,3,6,7,6,7]
+; AVX512F-SLOW-NEXT:    vporq %zmm7, %zmm8, %zmm7
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm16 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
+; AVX512F-SLOW-NEXT:    # ymm16 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpandq %ymm16, %ymm8, %ymm8
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm11[18,19,20,21],zero,ymm11[19],zero,ymm11[25,26,27,22],zero,ymm11[20],zero
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm9, %zmm8
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
+; AVX512F-SLOW-NEXT:    vporq %zmm9, %zmm8, %zmm9
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,1,1,4,4,5,5]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm17 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
+; AVX512F-SLOW-NEXT:    # ymm17 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-SLOW-NEXT:    vpandq %ymm17, %ymm8, %ymm8
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm11[23],zero,ymm11[21,22,23,26],zero,ymm11[24],zero,ymm11[28,29,26,27]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm11, %ymm20
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm10
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
+; AVX512F-SLOW-NEXT:    vporq %zmm10, %zmm8, %zmm8
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm8
+; AVX512F-SLOW-NEXT:    vmovdqa (%rsi), %xmm11
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm11[u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u,u],zero
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdi), %xmm12
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm12[u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u,u,9]
+; AVX512F-SLOW-NEXT:    vpor %xmm7, %xmm9, %xmm7
+; AVX512F-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm9[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
+; AVX512F-SLOW-NEXT:    vinserti32x4 $2, %xmm7, %zmm9, %zmm7
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm7[0,1,0,1,4,5,4,5]
+; AVX512F-SLOW-NEXT:    vmovdqa (%rcx), %xmm7
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm13 = xmm7[u,u,u],zero,xmm7[7],zero,xmm7[5,u,u,u],zero,xmm7[8],zero,xmm7[6,u,u]
+; AVX512F-SLOW-NEXT:    vmovdqa (%rdx), %xmm9
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
+; AVX512F-SLOW-NEXT:    vpor %xmm13, %xmm14, %xmm13
+; AVX512F-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
+; AVX512F-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm14, %zmm13
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm18 = zmm13[0,1,0,1,4,5,4,5]
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm18
+; AVX512F-SLOW-NEXT:    vmovdqa (%r9), %xmm13
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
+; AVX512F-SLOW-NEXT:    vmovdqa (%r8), %xmm14
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
+; AVX512F-SLOW-NEXT:    vpor %xmm10, %xmm15, %xmm10
+; AVX512F-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm15[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
+; AVX512F-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm15, %zmm10
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm19 = zmm10[0,1,0,1,4,5,4,5]
+; AVX512F-SLOW-NEXT:    vmovdqa (%r10), %xmm15
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
+; AVX512F-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm15[1,1,0,0,4,5,6,7]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,0]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm0[0,0,1,0,4,4,5,4]
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm10
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm10
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14,u,u],zero,zero,zero,zero,ymm1[15,u,u],zero,zero,zero,zero,ymm1[16,u,u],zero,zero,zero,zero,ymm1[17,u,u],zero,zero,zero,zero,ymm1[18]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm19
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
+; AVX512F-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u,u,u]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm2
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,14],zero,ymm2[u,u,u,u,u,15],zero,ymm2[u,u,u,u,u,16],zero,ymm2[u,u,u,u,u,17],zero,ymm2[u,u,u,u,u]
+; AVX512F-SLOW-NEXT:    vpor %ymm1, %ymm7, %ymm1
+; AVX512F-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm7, %zmm1
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,ymm5[u,u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[13,u,u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u]
+; AVX512F-SLOW-NEXT:    vpor %ymm0, %ymm7, %ymm0
+; AVX512F-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm7, %zmm0
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
+; AVX512F-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
+; AVX512F-SLOW-NEXT:    vpandn %ymm7, %ymm9, %ymm7
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = zero,ymm4[13,u,u,u,u],zero,zero,ymm4[14,u,u,u,u],zero,zero,ymm4[15,u,u,u,u],zero,zero,ymm4[16,u,u,u,u],zero,zero,ymm4[17,u,u]
+; AVX512F-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
+; AVX512F-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm7
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpternlogq $248, %ymm16, %ymm0, %ymm1
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm0
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero,zero
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm1, %ymm2
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm1
+; AVX512F-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
+; AVX512F-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
+; AVX512F-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,3,3,6,6,7,7]
+; AVX512F-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
+; AVX512F-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
+; AVX512F-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm3
+; AVX512F-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm3
+; AVX512F-SLOW-NEXT:    vmovdqa %ymm3, 192(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm10, (%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm8, 128(%rax)
+; AVX512F-SLOW-NEXT:    vmovdqa64 %zmm7, 64(%rax)
+; AVX512F-SLOW-NEXT:    vzeroupper
+; AVX512F-SLOW-NEXT:    retq
 ;
 ; AVX512F-FAST-LABEL: store_i8_stride7_vf32:
 ; AVX512F-FAST:       # %bb.0:
@@ -3622,161 +3622,6 @@ define void @store_i8_stride7_vf32(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512F-FAST-NEXT:    vzeroupper
 ; AVX512F-FAST-NEXT:    retq
 ;
-; AVX512DQ-SLOW-LABEL: store_i8_stride7_vf32:
-; AVX512DQ-SLOW:       # %bb.0:
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %r10
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %ymm11
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %ymm5
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %ymm6
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r10), %ymm4
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,u,u,u,u,26,27,24,25]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm4[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm9 = <u,5,4,u,5,u,4,u,20,21,u,23,u,21,u,23>
-; AVX512DQ-SLOW-NEXT:    vpermi2d %zmm7, %zmm8, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm6[20],zero,ymm6[18],zero,zero,zero,zero,ymm6[21],zero,ymm6[19],zero,zero,zero,zero,ymm6[22]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm6[25],zero,ymm6[23],zero,zero,zero,zero,ymm6[26],zero,ymm6[24],zero,zero
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm7, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm7 = zmm7[2,3,2,3,6,7,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,ymm5[18],zero,ymm5[20,21,20,21],zero,ymm5[19],zero,ymm5[19,20,21,22],zero
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm5[23],zero,ymm5[23,24,25,26],zero,ymm5[24],zero,ymm5[30,31]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm8 = zmm8[2,3,2,3,6,7,6,7]
-; AVX512DQ-SLOW-NEXT:    vporq %zmm7, %zmm8, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm8 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,25,u,23,u,u,u,u,26,u,24,u,u,u,u,27,u]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm16 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
-; AVX512DQ-SLOW-NEXT:    # ymm16 = mem[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vpandq %ymm16, %ymm8, %ymm8
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,ymm11[18,19,20,21],zero,ymm11[19],zero,ymm11[25,26,27,22],zero,ymm11[20],zero
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm8, %zmm9, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm3[18],zero,zero,zero,zero,ymm3[21],zero,ymm3[19],zero,zero,zero,zero,ymm3[22],zero,ymm3[20]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm9, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm9 = zmm9[2,3,2,3,6,7,6,7]
-; AVX512DQ-SLOW-NEXT:    vporq %zmm9, %zmm8, %zmm9
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} ymm8 = ymm2[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm8 = ymm8[0,0,1,1,4,4,5,5]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm8 = ymm8[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm17 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
-; AVX512DQ-SLOW-NEXT:    # ymm17 = mem[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vpandq %ymm17, %ymm8, %ymm8
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25],zero,ymm11[23],zero,ymm11[21,22,23,26],zero,ymm11[24],zero,ymm11[28,29,26,27]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm11, %ymm20
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm8, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[21],zero,ymm1[19],zero,zero,zero,zero,ymm1[22],zero,ymm1[20],zero,zero
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm3[25],zero,ymm3[23],zero,zero,zero,zero,ymm3[26],zero,ymm3[24],zero,zero,zero,zero
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm10, %zmm10
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm10[2,3,2,3,6,7,6,7]
-; AVX512DQ-SLOW-NEXT:    vporq %zmm10, %zmm8, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm8
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm8
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rsi), %xmm11
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm11[u],zero,xmm11[7],zero,xmm11[5,u,u,u],zero,xmm11[8],zero,xmm11[6,u,u,u],zero
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdi), %xmm12
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm12[u,7],zero,xmm12[5],zero,xmm12[u,u,u,8],zero,xmm12[6],zero,xmm12[u,u,u,9]
-; AVX512DQ-SLOW-NEXT:    vpor %xmm7, %xmm9, %xmm7
-; AVX512DQ-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm9 = xmm12[0],xmm11[0],xmm12[1],xmm11[1],xmm12[2],xmm11[2],xmm12[3],xmm11[3],xmm12[4],xmm11[4],xmm12[5],xmm11[5],xmm12[6],xmm11[6],xmm12[7],xmm11[7]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm9 = xmm9[0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5]
-; AVX512DQ-SLOW-NEXT:    vinserti32x4 $2, %xmm7, %zmm9, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm7[0,1,0,1,4,5,4,5]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rcx), %xmm7
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm13 = xmm7[u,u,u],zero,xmm7[7],zero,xmm7[5,u,u,u],zero,xmm7[8],zero,xmm7[6,u,u]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%rdx), %xmm9
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm9[u,u,u,7],zero,xmm9[5],zero,xmm9[u,u,u,8],zero,xmm9[6],zero,xmm9[u,u]
-; AVX512DQ-SLOW-NEXT:    vpor %xmm13, %xmm14, %xmm13
-; AVX512DQ-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm14 = xmm9[0],xmm7[0],xmm9[1],xmm7[1],xmm9[2],xmm7[2],xmm9[3],xmm7[3],xmm9[4],xmm7[4],xmm9[5],xmm7[5],xmm9[6],xmm7[6],xmm9[7],xmm7[7]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm14 = xmm14[4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9]
-; AVX512DQ-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm14, %zmm13
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm18 = zmm13[0,1,0,1,4,5,4,5]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm10, %zmm18
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r9), %xmm13
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = zero,xmm13[4,u,u,u],zero,xmm13[7],zero,xmm13[5,u,u,u],zero,xmm13[8],zero,xmm13[6]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r8), %xmm14
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm14[4],zero,xmm14[u,u,u,7],zero,xmm14[5],zero,xmm14[u,u,u,8],zero,xmm14[6],zero
-; AVX512DQ-SLOW-NEXT:    vpor %xmm10, %xmm15, %xmm10
-; AVX512DQ-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm15 = xmm14[0],xmm13[0],xmm14[1],xmm13[1],xmm14[2],xmm13[2],xmm14[3],xmm13[3],xmm14[4],xmm13[4],xmm14[5],xmm13[5],xmm14[6],xmm13[6],xmm14[7],xmm13[7]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm15 = xmm15[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vinserti32x4 $2, %xmm10, %zmm15, %zmm10
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm19 = zmm10[0,1,0,1,4,5,4,5]
-; AVX512DQ-SLOW-NEXT:    vmovdqa (%r10), %xmm15
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm10 = xmm15[4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshuflw {{.*#+}} xmm0 = xmm15[1,1,0,0,4,5,6,7]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm0 = xmm0[0,1,2,0]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} zmm10 = zmm0[0,0,1,0,4,4,5,4]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm19, %zmm10
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm10
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14,u,u],zero,zero,zero,zero,ymm1[15,u,u],zero,zero,zero,zero,ymm1[16,u,u],zero,zero,zero,zero,ymm1[17,u,u],zero,zero,zero,zero,ymm1[18]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm1, %ymm19
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[0,1,14],zero,ymm2[u,u,0,1,14,15],zero,ymm2[u,u,13,2,3,16],zero,ymm2[u,u,28,29,16,17],zero,ymm2[u,u,19,28,29,18],zero
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm2, %ymm18
-; AVX512DQ-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm7[8],xmm9[8],xmm7[9],xmm9[9],xmm7[10],xmm9[10],xmm7[11],xmm9[11],xmm7[12],xmm9[12],xmm7[13],xmm9[13],xmm7[14],xmm9[14],xmm7[15],xmm9[15]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u],zero,ymm3[14,u,u,u,u,u],zero,ymm3[15,u,u,u,u,u],zero,ymm3[16,u,u,u,u,u],zero,ymm3[17,u,u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm20, %ymm2
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm2[u,u,u,u,14],zero,ymm2[u,u,u,u,u,15],zero,ymm2[u,u,u,u,u,16],zero,ymm2[u,u,u,u,u,17],zero,ymm2[u,u,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vpor %ymm1, %ymm7, %ymm1
-; AVX512DQ-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm11[8],xmm12[8],xmm11[9],xmm12[9],xmm11[10],xmm12[10],xmm11[11],xmm12[11],xmm11[12],xmm12[12],xmm11[13],xmm12[13],xmm11[14],xmm12[14],xmm11[15],xmm12[15]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm1, %zmm7, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm1
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = zero,ymm5[u,u,u,u,u,14],zero,ymm5[u,u,u,u,u,15],zero,ymm5[u,u,u,u,u,16],zero,ymm5[u,u,u,u,u,17],zero,ymm5[u,u,u]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm7 = ymm6[13,u,u,u,u,u],zero,ymm6[14,u,u,u,u,u],zero,ymm6[15,u,u,u,u,u],zero,ymm6[16,u,u,u,u,u],zero,ymm6[17,u,u,u]
-; AVX512DQ-SLOW-NEXT:    vpor %ymm0, %ymm7, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm14[8],xmm13[8],xmm14[9],xmm13[9],xmm14[10],xmm13[10],xmm14[11],xmm13[11],xmm14[12],xmm13[12],xmm14[13],xmm13[13],xmm14[14],xmm13[14],xmm14[15],xmm13[15]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} xmm7 = xmm7[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm7, %zmm0
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} xmm7 = xmm15[0,1,2,3,4,5,5,6]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} xmm7 = xmm7[2,2,3,3]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512DQ-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255,255,0,255,255,255,255,255]
-; AVX512DQ-SLOW-NEXT:    vpandn %ymm7, %ymm9, %ymm7
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm9 = zero,ymm4[13,u,u,u,u],zero,zero,ymm4[14,u,u,u,u],zero,zero,ymm4[15,u,u,u,u],zero,zero,ymm4[16,u,u,u,u],zero,zero,ymm4[17,u,u]
-; AVX512DQ-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm7, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm7
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm6[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm5[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm5[27],zero,zero,zero,zero,ymm5[30],zero,ymm5[28],zero,zero,zero,zero,ymm5[31],zero,ymm5[29]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, %ymm16, %ymm0, %ymm1
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm0 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm0
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,27,u,u,u,u,30,u,28,u,u,u,u,31,u,29,u]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm2 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm2[30],zero,ymm2[28],zero,zero,zero,zero,ymm2[31],zero,ymm2[29],zero,zero
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $248, %ymm17, %ymm1, %ymm2
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm19, %ymm1
-; AVX512DQ-SLOW-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %ymm18, %ymm3
-; AVX512DQ-SLOW-NEXT:    vpshufhw {{.*#+}} ymm3 = ymm3[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512DQ-SLOW-NEXT:    vpshufd {{.*#+}} ymm3 = ymm3[2,2,3,3,6,6,7,7]
-; AVX512DQ-SLOW-NEXT:    vpermq {{.*#+}} ymm3 = ymm3[2,3,2,3]
-; AVX512DQ-SLOW-NEXT:    vpternlogq $236, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm1, %ymm3
-; AVX512DQ-SLOW-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm2, %ymm3
-; AVX512DQ-SLOW-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm3
-; AVX512DQ-SLOW-NEXT:    vmovdqa %ymm3, 192(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm10, (%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm8, 128(%rax)
-; AVX512DQ-SLOW-NEXT:    vmovdqa64 %zmm7, 64(%rax)
-; AVX512DQ-SLOW-NEXT:    vzeroupper
-; AVX512DQ-SLOW-NEXT:    retq
-;
 ; AVX512BW-SLOW-LABEL: store_i8_stride7_vf32:
 ; AVX512BW-SLOW:       # %bb.0:
 ; AVX512BW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
@@ -7817,1193 +7662,756 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512F-SLOW-NEXT:    vzeroupper
 ; AVX512F-SLOW-NEXT:    retq
 ;
-; AVX512F-ONLY-FAST-LABEL: store_i8_stride7_vf64:
-; AVX512F-ONLY-FAST:       # %bb.0:
-; AVX512F-ONLY-FAST-NEXT:    subq $1256, %rsp # imm = 0x4E8
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm1, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm2, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm16
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rax), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm23
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[0,1,14],zero,ymm1[12,13,0,1,14,15],zero,ymm1[3,12,13,2,3,16],zero,ymm1[30,31,28,29,16,17],zero,ymm1[31,18,19,28,29,18],zero
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm26
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm0, %ymm3, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm25
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm5, (%rsp) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
-; AVX512F-ONLY-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm5, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm0, %ymm30
-; AVX512F-ONLY-FAST-NEXT:    vporq %ymm3, %ymm5, %ymm24
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdx), %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rcx), %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm8, %xmm6, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm6, %xmm28
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm9, %xmm19
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
-; AVX512F-ONLY-FAST-NEXT:    vpor %xmm5, %xmm6, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rdi), %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rsi), %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm11, %xmm6, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm10, %xmm27
-; AVX512F-ONLY-FAST-NEXT:    vpor %xmm5, %xmm9, %xmm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%r8), %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm5, %xmm15, %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm0, %xmm10, %xmm12
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm0, %xmm21
-; AVX512F-ONLY-FAST-NEXT:    vporq %xmm9, %xmm12, %xmm22
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm13, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm13, %ymm20
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm14, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm7, %ymm2, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm16, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = zero,zero,zero,ymm7[14],zero,zero,zero,zero,zero,zero,ymm7[15],zero,zero,zero,zero,zero,zero,ymm7[16],zero,zero,zero,zero,zero,zero,ymm7[17],zero,zero,zero,zero,zero,zero,ymm7[18]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm17, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[0,1,14],zero,ymm7[12,13,0,1,14,15],zero,ymm7[3,12,13,2,3,16],zero,ymm7[30,31,28,29,16,17],zero,ymm7[31,18,19,28,29,18],zero
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm2, %ymm7, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm18, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %ymm18, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm25, %ymm7
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm2, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rsi), %xmm13
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm11, %xmm13, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdi), %xmm9
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vporq %xmm0, %xmm2, %xmm31
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm8, %xmm14, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rdx), %xmm8
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm19, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm2, %xmm8, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r9), %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm5, %xmm2, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm2, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%r8), %xmm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm2, %xmm4, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27],zero,ymm0[25]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm23, %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm12[21],zero,ymm12[19],zero,zero,zero,zero,ymm12[22],zero,ymm12[20],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm26, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm11[25],zero,ymm11[23],zero,zero,zero,zero,ymm11[26],zero,ymm11[24],zero,zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[18],zero,zero,zero,zero,ymm11[21],zero,ymm11[19],zero,zero,zero,zero,ymm11[22],zero,ymm11[20]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
-; AVX512F-ONLY-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128]
-; AVX512F-ONLY-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm2, %ymm19
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm30
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm24, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,0,1],zmm0[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [2,2,3,3,2,2,3,3]
-; AVX512F-ONLY-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rax), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,6]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm0, %ymm2, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa (%rax), %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm24
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu (%rsp), %ymm0 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm1, %ymm25
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
-; AVX512F-ONLY-FAST-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %ymm26, %ymm0, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm21 = zmm1[0,1,2,3],zmm0[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm28, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm14[8],xmm8[8],xmm14[9],xmm8[9],xmm14[10],xmm8[10],xmm14[11],xmm8[11],xmm14[12],xmm8[12],xmm14[13],xmm8[13],xmm14[14],xmm8[14],xmm14[15],xmm8[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm4 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm4, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm4, %xmm0, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm28
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm27, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %xmm6, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm6 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm13[8],xmm9[8],xmm13[9],xmm9[9],xmm13[10],xmm9[10],xmm13[11],xmm9[11],xmm13[12],xmm9[12],xmm13[13],xmm9[13],xmm13[14],xmm9[14],xmm13[15],xmm9[15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm5, %xmm4, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti32x4 $2, %xmm1, %zmm0, %zmm27
-; AVX512F-ONLY-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm10[8],xmm15[8],xmm10[9],xmm15[9],xmm10[10],xmm15[10],xmm10[11],xmm15[11],xmm10[12],xmm15[12],xmm10[13],xmm15[13],xmm10[14],xmm15[14],xmm10[15],xmm15[15]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm3, %xmm1, %xmm1
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm22[0,1,0,1],zmm1[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa 32(%rax), %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,6]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm0, %xmm29
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm1, %ymm2, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm11, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm20, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm22 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm19, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm2, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm20 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm0, %ymm2, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm2 = [9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm16, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm0, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm2, %ymm30
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm19 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm18, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [4,5,4,5,5,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm9[0],xmm13[0],xmm9[1],xmm13[1],xmm9[2],xmm13[2],xmm9[3],xmm13[3],xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm6, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm9, %xmm0, %xmm0
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29,28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29]
-; AVX512F-ONLY-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm17, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm9, %ymm3, %ymm15
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm17 = ymm15[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm25, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm18 = ymm13[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vinserti32x4 $2, %xmm31, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm8, %xmm7, %xmm7
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm8, %xmm0, %xmm13
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm0[23],zero,ymm0[23,24,25,26],zero,ymm0[24],zero,ymm0[30,31]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm0, %ymm25
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm8[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa %ymm12, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm23[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm11[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm31 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23]
-; AVX512F-ONLY-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm3, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm30, %ymm3
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm3, %ymm1, %ymm13
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm2, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm12, %ymm9, %ymm9
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm9, %zmm6
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
-; AVX512F-ONLY-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, %ymm9, %ymm5, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm6, %zmm0, %zmm7
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm4, %ymm10, %ymm4
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm3, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm0, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vpandq %ymm9, %ymm22, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm20, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vporq %zmm2, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpandq %ymm26, %ymm19, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm2, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vporq %zmm4, %zmm2, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpandq %ymm26, %ymm18, %ymm0
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm15, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vporq %zmm4, %zmm0, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} zmm2 = zmm28[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} zmm8 = zmm27[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm4, %zmm8
-; AVX512F-ONLY-FAST-NEXT:    vpandq %ymm26, %ymm13, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vporq %zmm2, %zmm1, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm6 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vporq %zmm2, %zmm6, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm4, %zmm9
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm18 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm18 = zmm1[0,1,0,1],mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %xmm29, %xmm3
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[1,1,0,0,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,0,1,2,0,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm2, %ymm4, %ymm19
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} xmm6 = xmm1[1,1,0,0,4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm6, %ymm4, %ymm17
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} xmm6 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm6, %xmm3, %xmm10
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %xmm6, %xmm1, %xmm6
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
-; AVX512F-ONLY-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm12
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
-; AVX512F-ONLY-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
-; AVX512F-ONLY-FAST-NEXT:    # ymm14 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %ymm25, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm1, %ymm15
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm2[23],zero,ymm2[23,24,25,26],zero,ymm2[24],zero,ymm2[30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb %ymm14, %ymm2, %ymm14
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm3[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [4,5,4,5,5,7,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermd %ymm4, %ymm2, %ymm20
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512F-ONLY-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
-; AVX512F-ONLY-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm22 = mem[2,3,2,3,6,7,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm22
-; AVX512F-ONLY-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # ymm23 = mem[0,1,0,1]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm23 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm23
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm24
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm23, %zmm24
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm21
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 32-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm0
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm12, %ymm15, %ymm2
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm7[0,1,2,3],zmm2[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm16
-; AVX512F-ONLY-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512F-ONLY-FAST-NEXT:    # zmm2 = mem[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpermq {{.*#+}} zmm5 = zmm31[0,1,0,1,4,5,4,5]
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm5
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm17, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm2
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm1, %ymm13, %ymm1
-; AVX512F-ONLY-FAST-NEXT:    vpor %ymm11, %ymm14, %ymm5
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm1
-; AVX512F-ONLY-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm5[0,1,2,3],zmm1[4,5,6,7]
-; AVX512F-ONLY-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm20, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm4
-; AVX512F-ONLY-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm4, 128(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm2, (%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm0, 256(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm21, 192(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm24, 64(%rax)
-; AVX512F-ONLY-FAST-NEXT:    vmovdqa64 %zmm22, 384(%rax)
-; AVX512F-ONLY-FAST-NEXT:    addq $1256, %rsp # imm = 0x4E8
-; AVX512F-ONLY-FAST-NEXT:    vzeroupper
-; AVX512F-ONLY-FAST-NEXT:    retq
-;
-; AVX512DQ-FAST-LABEL: store_i8_stride7_vf64:
-; AVX512DQ-FAST:       # %bb.0:
-; AVX512DQ-FAST-NEXT:    subq $1256, %rsp # imm = 0x4E8
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm1, %ymm14
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm2, %ymm13
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm16
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm18
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rax), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm23
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[0,1,14],zero,ymm1[12,13,0,1,14,15],zero,ymm1[3,12,13,2,3,16],zero,ymm1[30,31,28,29,16,17],zero,ymm1[31,18,19,28,29,18],zero
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm26
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm3
-; AVX512DQ-FAST-NEXT:    vpor %ymm0, %ymm3, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm25
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm5, (%rsp) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
-; AVX512DQ-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm5, %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm0, %ymm30
-; AVX512DQ-FAST-NEXT:    vporq %ymm3, %ymm5, %ymm24
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdx), %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rcx), %xmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm8, %xmm6, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm6, %xmm28
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm9, %xmm19
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
-; AVX512DQ-FAST-NEXT:    vpor %xmm5, %xmm6, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rdi), %xmm10
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rsi), %xmm6
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm11, %xmm6, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm10, %xmm27
-; AVX512DQ-FAST-NEXT:    vpor %xmm5, %xmm9, %xmm5
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%r8), %xmm10
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm5, %xmm15, %xmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm0, %xmm10, %xmm12
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm0, %xmm21
-; AVX512DQ-FAST-NEXT:    vporq %xmm9, %xmm12, %xmm22
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm13, %ymm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm13, %ymm20
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm14, %ymm2
-; AVX512DQ-FAST-NEXT:    vpor %ymm7, %ymm2, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm16, %ymm7
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = zero,zero,zero,ymm7[14],zero,zero,zero,zero,zero,zero,ymm7[15],zero,zero,zero,zero,zero,zero,ymm7[16],zero,zero,zero,zero,zero,zero,ymm7[17],zero,zero,zero,zero,zero,zero,ymm7[18]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm17, %ymm7
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[0,1,14],zero,ymm7[12,13,0,1,14,15],zero,ymm7[3,12,13,2,3,16],zero,ymm7[30,31,28,29,16,17],zero,ymm7[31,18,19,28,29,18],zero
-; AVX512DQ-FAST-NEXT:    vpor %ymm2, %ymm7, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm18, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %ymm18, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm25, %ymm7
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
-; AVX512DQ-FAST-NEXT:    vpor %ymm2, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rsi), %xmm13
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm11, %xmm13, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdi), %xmm9
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm2
-; AVX512DQ-FAST-NEXT:    vporq %xmm0, %xmm2, %xmm31
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rcx), %xmm14
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm8, %xmm14, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rdx), %xmm8
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm19, %xmm2
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm2, %xmm8, %xmm2
-; AVX512DQ-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r9), %xmm2
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm5, %xmm2, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm2, %xmm3
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa (%r8), %xmm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm2, %xmm4, %xmm2
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27],zero,ymm0[25]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm23, %ymm12
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm12[21],zero,ymm12[19],zero,zero,zero,zero,ymm12[22],zero,ymm12[20],zero,zero
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm26, %ymm11
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm11[25],zero,ymm11[23],zero,zero,zero,zero,ymm11[26],zero,ymm11[24],zero,zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[18],zero,zero,zero,zero,ymm11[21],zero,ymm11[19],zero,zero,zero,zero,ymm11[22],zero,ymm11[20]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
-; AVX512DQ-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128]
-; AVX512DQ-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm2, %ymm19
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm30
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm24, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,0,1],zmm0[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [2,2,3,3,2,2,3,3]
-; AVX512DQ-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rax), %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,6]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm0, %ymm2, %ymm0
-; AVX512DQ-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa (%rax), %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm24
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm0, %ymm23
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
-; AVX512DQ-FAST-NEXT:    vmovdqu (%rsp), %ymm0 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm1, %ymm25
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vbroadcasti64x2 {{.*#+}} ymm26 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
-; AVX512DQ-FAST-NEXT:    # ymm26 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %ymm26, %ymm0, %ymm1
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm21 = zmm1[0,1,2,3],zmm0[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm28, %xmm1
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm14[8],xmm8[8],xmm14[9],xmm8[9],xmm14[10],xmm8[10],xmm14[11],xmm8[11],xmm14[12],xmm8[12],xmm14[13],xmm8[13],xmm14[14],xmm8[14],xmm14[15],xmm8[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm4 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm4, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm4, %xmm0, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm28
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm27, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa %xmm6, %xmm1
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm6 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm13[8],xmm9[8],xmm13[9],xmm9[9],xmm13[10],xmm9[10],xmm13[11],xmm9[11],xmm13[12],xmm9[12],xmm13[13],xmm9[13],xmm13[14],xmm9[14],xmm13[15],xmm9[15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm5, %xmm4, %xmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti32x4 $2, %xmm1, %zmm0, %zmm27
-; AVX512DQ-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm10[8],xmm15[8],xmm10[9],xmm15[9],xmm10[10],xmm15[10],xmm10[11],xmm15[11],xmm10[12],xmm15[12],xmm10[13],xmm15[13],xmm10[14],xmm15[14],xmm10[15],xmm15[15]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm3, %xmm1, %xmm1
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm22[0,1,0,1],zmm1[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vmovdqa 32(%rax), %xmm0
-; AVX512DQ-FAST-NEXT:    vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,6]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm0, %xmm29
-; AVX512DQ-FAST-NEXT:    vpermd %ymm1, %ymm2, %ymm0
-; AVX512DQ-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm11, %ymm5
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm20, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm22 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm19, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm2, %ymm1
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm20 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm0, %ymm2, %ymm4
-; AVX512DQ-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm2 = [9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm16, %ymm0
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm0, %ymm1
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm2, %ymm30
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm19 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero,zero
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm18, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [4,5,4,5,5,7,4,5]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm0
-; AVX512DQ-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm16
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm9[0],xmm13[0],xmm9[1],xmm13[1],xmm9[2],xmm13[2],xmm9[3],xmm13[3],xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm6, %xmm6
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm9, %xmm0, %xmm0
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29,28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29]
-; AVX512DQ-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm17, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm9, %ymm3, %ymm15
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm17 = ymm15[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm25, %ymm2
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm18 = ymm13[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vinserti32x4 $2, %xmm31, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm8, %xmm7, %xmm7
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm8, %xmm0, %xmm13
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm0[23],zero,ymm0[23,24,25,26],zero,ymm0[24],zero,ymm0[30,31]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm0, %ymm25
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm8[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqa %ymm12, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero,zero
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm23[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm11[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm31 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23]
-; AVX512DQ-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm3, %ymm14
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm30, %ymm3
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm3, %ymm1, %ymm13
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm2, %ymm1
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpor %ymm12, %ymm9, %ymm9
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm9, %zmm6
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
-; AVX512DQ-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, %ymm9, %ymm5, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm6, %zmm0, %zmm7
-; AVX512DQ-FAST-NEXT:    vpor %ymm4, %ymm10, %ymm4
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm4
-; AVX512DQ-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm3, %zmm5
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm0, %zmm5
-; AVX512DQ-FAST-NEXT:    vpandq %ymm9, %ymm22, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm20, %zmm0
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vporq %zmm2, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vpandq %ymm26, %ymm19, %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm2, %zmm2
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vporq %zmm4, %zmm2, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
-; AVX512DQ-FAST-NEXT:    vpandq %ymm26, %ymm18, %ymm0
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm15, %zmm0
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vporq %zmm4, %zmm0, %zmm0
-; AVX512DQ-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
-; AVX512DQ-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm0
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} zmm2 = zmm28[0,1,0,1,4,5,4,5]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} zmm8 = zmm27[0,1,0,1,4,5,4,5]
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm4, %zmm8
-; AVX512DQ-FAST-NEXT:    vpandq %ymm26, %ymm13, %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm1
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vporq %zmm2, %zmm1, %zmm1
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm6 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vporq %zmm2, %zmm6, %zmm9
-; AVX512DQ-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm4, %zmm9
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
-; AVX512DQ-FAST-NEXT:    vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm18 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm18 = zmm1[0,1,0,1],mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %xmm29, %xmm3
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[1,1,0,0,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,0,1,2,0,0,1]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm2, %ymm4, %ymm19
-; AVX512DQ-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} xmm6 = xmm1[1,1,0,0,4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm6, %ymm4, %ymm17
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} xmm6 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm6, %xmm3, %xmm10
-; AVX512DQ-FAST-NEXT:    vpshufb %xmm6, %xmm1, %xmm6
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
-; AVX512DQ-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm12
-; AVX512DQ-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
-; AVX512DQ-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
-; AVX512DQ-FAST-NEXT:    # ymm14 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %ymm25, %ymm1
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm1, %ymm15
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm2[23],zero,ymm2[23,24,25,26],zero,ymm2[24],zero,ymm2[30,31]
-; AVX512DQ-FAST-NEXT:    vpshufb %ymm14, %ymm2, %ymm14
-; AVX512DQ-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
-; AVX512DQ-FAST-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm3[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
-; AVX512DQ-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [4,5,4,5,5,7,4,5]
-; AVX512DQ-FAST-NEXT:    vpermd %ymm4, %ymm2, %ymm20
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
-; AVX512DQ-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
-; AVX512DQ-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm22 = mem[2,3,2,3,6,7,6,7]
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm22
-; AVX512DQ-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # ymm23 = mem[0,1,0,1]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm23 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm23
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm24
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm23, %zmm24
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm21
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 32-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm0
-; AVX512DQ-FAST-NEXT:    vpor %ymm12, %ymm15, %ymm2
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm2
-; AVX512DQ-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm7[0,1,2,3],zmm2[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm16
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm16
-; AVX512DQ-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
-; AVX512DQ-FAST-NEXT:    # zmm2 = mem[0,1,0,1,4,5,4,5]
-; AVX512DQ-FAST-NEXT:    vpermq {{.*#+}} zmm5 = zmm31[0,1,0,1,4,5,4,5]
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm5
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm17, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm2
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm2
-; AVX512DQ-FAST-NEXT:    vpor %ymm1, %ymm13, %ymm1
-; AVX512DQ-FAST-NEXT:    vpor %ymm11, %ymm14, %ymm5
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm1
-; AVX512DQ-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm5[0,1,2,3],zmm1[4,5,6,7]
-; AVX512DQ-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm20, %zmm4
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm4
-; AVX512DQ-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm4
-; AVX512DQ-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm4, 128(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm2, (%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm0, 256(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm21, 192(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm24, 64(%rax)
-; AVX512DQ-FAST-NEXT:    vmovdqa64 %zmm22, 384(%rax)
-; AVX512DQ-FAST-NEXT:    addq $1256, %rsp # imm = 0x4E8
-; AVX512DQ-FAST-NEXT:    vzeroupper
-; AVX512DQ-FAST-NEXT:    retq
+; AVX512F-FAST-LABEL: store_i8_stride7_vf64:
+; AVX512F-FAST:       # %bb.0:
+; AVX512F-FAST-NEXT:    subq $1256, %rsp # imm = 0x4E8
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %ymm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
+; AVX512F-FAST-NEXT:    vmovdqa %ymm1, %ymm14
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm2[25],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vmovdqa %ymm2, %ymm13
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %ymm2
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero,zero
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm16
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm2[23],zero,zero,zero,zero,ymm2[26],zero,ymm2[24],zero,zero,zero,zero,ymm2[27],zero,ymm2[25]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm2, %ymm17
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %ymm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm18
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm4[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm4[25],zero,ymm4[23],zero,zero,zero,zero,ymm4[26],zero,ymm4[24],zero,zero
+; AVX512F-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rax), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %ymm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = zero,zero,zero,ymm1[14],zero,zero,zero,zero,zero,zero,ymm1[15],zero,zero,zero,zero,zero,zero,ymm1[16],zero,zero,zero,zero,zero,zero,ymm1[17],zero,zero,zero,zero,zero,zero,ymm1[18]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm23
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[0,1,14],zero,ymm1[12,13,0,1,14,15],zero,ymm1[3,12,13,2,3,16],zero,ymm1[30,31,28,29,16,17],zero,ymm1[31,18,19,28,29,18],zero
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm7 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm26
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm3
+; AVX512F-FAST-NEXT:    vpor %ymm0, %ymm3, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm0, %ymm3
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm25
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %ymm5
+; AVX512F-FAST-NEXT:    vmovdqu %ymm5, (%rsp) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm0 = [13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0,13,0,0,0,128,16,128,14,0,0,0,128,17,128,15,0]
+; AVX512F-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm5, %ymm5
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm0, %ymm30
+; AVX512F-FAST-NEXT:    vporq %ymm3, %ymm5, %ymm24
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdx), %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rcx), %xmm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %xmm8, %xmm6, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm6, %xmm28
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm3, %xmm6
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm9, %xmm19
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm3, %xmm29
+; AVX512F-FAST-NEXT:    vpor %xmm5, %xmm6, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm3, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rdi), %xmm10
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rsi), %xmm6
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm11 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
+; AVX512F-FAST-NEXT:    vpshufb %xmm11, %xmm6, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm10, %xmm9
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm10, %xmm27
+; AVX512F-FAST-NEXT:    vpor %xmm5, %xmm9, %xmm5
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm5, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r9), %xmm15
+; AVX512F-FAST-NEXT:    vmovdqa 32(%r8), %xmm10
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
+; AVX512F-FAST-NEXT:    vpshufb %xmm5, %xmm15, %xmm9
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm0 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
+; AVX512F-FAST-NEXT:    vpshufb %xmm0, %xmm10, %xmm12
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm0, %xmm21
+; AVX512F-FAST-NEXT:    vporq %xmm9, %xmm12, %xmm22
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm13, %ymm7
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm13, %ymm20
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm14, %ymm2
+; AVX512F-FAST-NEXT:    vpor %ymm7, %ymm2, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm16, %ymm7
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = zero,zero,zero,ymm7[14],zero,zero,zero,zero,zero,zero,ymm7[15],zero,zero,zero,zero,zero,zero,ymm7[16],zero,zero,zero,zero,zero,zero,ymm7[17],zero,zero,zero,zero,zero,zero,ymm7[18]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm17, %ymm7
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm7 = ymm7[0,1,14],zero,ymm7[12,13,0,1,14,15],zero,ymm7[3,12,13,2,3,16],zero,ymm7[30,31,28,29,16,17],zero,ymm7[31,18,19,28,29,18],zero
+; AVX512F-FAST-NEXT:    vpor %ymm2, %ymm7, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm18, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu64 %ymm18, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm25, %ymm7
+; AVX512F-FAST-NEXT:    vpshufb %ymm7, %ymm2, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm4, %ymm0
+; AVX512F-FAST-NEXT:    vpor %ymm2, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%rsi), %xmm13
+; AVX512F-FAST-NEXT:    vpshufb %xmm11, %xmm13, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rdi), %xmm9
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm9, %xmm2
+; AVX512F-FAST-NEXT:    vporq %xmm0, %xmm2, %xmm31
+; AVX512F-FAST-NEXT:    vmovdqa (%rcx), %xmm14
+; AVX512F-FAST-NEXT:    vpshufb %xmm8, %xmm14, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rdx), %xmm8
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm19, %xmm2
+; AVX512F-FAST-NEXT:    vpshufb %xmm2, %xmm8, %xmm2
+; AVX512F-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r9), %xmm2
+; AVX512F-FAST-NEXT:    vpshufb %xmm5, %xmm2, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm2, %xmm3
+; AVX512F-FAST-NEXT:    vmovdqa %xmm2, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa (%r8), %xmm4
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm21, %xmm2
+; AVX512F-FAST-NEXT:    vpshufb %xmm2, %xmm4, %xmm2
+; AVX512F-FAST-NEXT:    vmovdqa %xmm4, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpor %xmm0, %xmm2, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero,ymm0[27],zero,ymm0[25]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm23, %ymm12
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm12[21],zero,ymm12[19],zero,zero,zero,zero,ymm12[22],zero,ymm12[20],zero,zero
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm26, %ymm11
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm11[25],zero,ymm11[23],zero,zero,zero,zero,ymm11[26],zero,ymm11[24],zero,zero,zero,zero
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[18],zero,zero,zero,zero,ymm11[21],zero,ymm11[19],zero,zero,zero,zero,ymm11[22],zero,ymm11[20]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27,24,25,128,23,128,21,22,23,26,128,24,128,28,29,26,27]
+; AVX512F-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm5 = [18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128,18,128,18,19,20,21,128,19,128,25,26,27,22,128,20,128]
+; AVX512F-FAST-NEXT:    # ymm5 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm2, %ymm19
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm1, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm30
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm24, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm2 = xmm4[8],xmm3[8],xmm4[9],xmm3[9],xmm4[10],xmm3[10],xmm4[11],xmm3[11],xmm4[12],xmm3[12],xmm4[13],xmm3[13],xmm4[14],xmm3[14],xmm4[15],xmm3[15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm3 = <u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10>
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm2, %xmm2
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm2[0,1,0,1],zmm0[4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm2 = [2,2,3,3,2,2,3,3]
+; AVX512F-FAST-NEXT:    # ymm2 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa (%rax), %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 16-byte Spill
+; AVX512F-FAST-NEXT:    vpshufhw {{.*#+}} xmm0 = xmm0[0,1,2,3,4,5,5,6]
+; AVX512F-FAST-NEXT:    vpermd %ymm0, %ymm2, %ymm0
+; AVX512F-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa (%rax), %ymm4
+; AVX512F-FAST-NEXT:    vmovdqu %ymm4, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm5 = [128,13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128]
+; AVX512F-FAST-NEXT:    vpshufb %ymm5, %ymm4, %ymm4
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm5, %ymm18
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm0, %zmm24
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm0 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29],zero,zero
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm0, %ymm23
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14]
+; AVX512F-FAST-NEXT:    vmovdqu (%rsp), %ymm0 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm1, %ymm25
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm1[27],zero,zero,zero,zero,ymm1[30],zero,ymm1[28],zero,zero,zero,zero,ymm1[31],zero,ymm1[29]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vbroadcasti32x4 {{.*#+}} ymm26 = [18374967954648269055,71777218572844800,18374967954648269055,71777218572844800]
+; AVX512F-FAST-NEXT:    # ymm26 = mem[0,1,2,3,0,1,2,3]
+; AVX512F-FAST-NEXT:    vpternlogq $248, %ymm26, %ymm0, %ymm1
+; AVX512F-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm10[0],xmm15[0],xmm10[1],xmm15[1],xmm10[2],xmm15[2],xmm10[3],xmm15[3],xmm10[4],xmm15[4],xmm10[5],xmm15[5],xmm10[6],xmm15[6],xmm10[7],xmm15[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm0 = xmm0[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm21 = zmm1[0,1,2,3],zmm0[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm29, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm28, %xmm1
+; AVX512F-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm0[0],xmm1[0],xmm0[1],xmm1[1],xmm0[2],xmm1[2],xmm0[3],xmm1[3],xmm0[4],xmm1[4],xmm0[5],xmm1[5],xmm0[6],xmm1[6],xmm0[7],xmm1[7]
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm14[8],xmm8[8],xmm14[9],xmm8[9],xmm14[10],xmm8[10],xmm14[11],xmm8[11],xmm14[12],xmm8[12],xmm14[13],xmm8[13],xmm14[14],xmm8[14],xmm14[15],xmm8[15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm4 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
+; AVX512F-FAST-NEXT:    vpshufb %xmm4, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vmovdqu %ymm1, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %xmm4, %xmm0, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm1 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm28
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm27, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqa %xmm6, %xmm1
+; AVX512F-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm6 = xmm0[0],xmm6[0],xmm0[1],xmm6[1],xmm0[2],xmm6[2],xmm0[3],xmm6[3],xmm0[4],xmm6[4],xmm0[5],xmm6[5],xmm0[6],xmm6[6],xmm0[7],xmm6[7]
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm1[8],xmm0[8],xmm1[9],xmm0[9],xmm1[10],xmm0[10],xmm1[11],xmm0[11],xmm1[12],xmm0[12],xmm1[13],xmm0[13],xmm1[14],xmm0[14],xmm1[15],xmm0[15]
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm4 = xmm13[8],xmm9[8],xmm13[9],xmm9[9],xmm13[10],xmm9[10],xmm13[11],xmm9[11],xmm13[12],xmm9[12],xmm13[13],xmm9[13],xmm13[14],xmm9[14],xmm13[15],xmm9[15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm5 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
+; AVX512F-FAST-NEXT:    vpshufb %xmm5, %xmm4, %xmm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpshufb %xmm5, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti32x4 $2, %xmm1, %zmm0, %zmm27
+; AVX512F-FAST-NEXT:    vpunpckhbw {{.*#+}} xmm1 = xmm10[8],xmm15[8],xmm10[9],xmm15[9],xmm10[10],xmm15[10],xmm10[11],xmm15[11],xmm10[12],xmm15[12],xmm10[13],xmm15[13],xmm10[14],xmm15[14],xmm10[15],xmm15[15]
+; AVX512F-FAST-NEXT:    vpshufb %xmm3, %xmm1, %xmm1
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm0 = zmm22[0,1,0,1],zmm1[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vmovdqa 32(%rax), %xmm0
+; AVX512F-FAST-NEXT:    vpshufhw {{.*#+}} xmm1 = xmm0[0,1,2,3,4,5,5,6]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm0, %xmm29
+; AVX512F-FAST-NEXT:    vpermd %ymm1, %ymm2, %ymm0
+; AVX512F-FAST-NEXT:    vmovdqu %ymm0, {{[-0-9]+}}(%r{{[sb]}}p) # 32-byte Spill
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm1 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm11, %ymm5
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm20, %ymm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm1
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm22 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm19, %ymm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm2, %ymm1
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm20 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm10 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm0[18],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm30, %ymm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm0, %ymm2, %ymm4
+; AVX512F-FAST-NEXT:    vpbroadcastq {{.*#+}} ymm2 = [9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10,9,8,7,0,0,0,11,10]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm16, %ymm0
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm0, %ymm1
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm2, %ymm30
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm19 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm11 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm0[21],zero,ymm0[19],zero,zero,zero,zero,ymm0[22],zero,ymm0[20],zero,zero
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm18, %ymm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm1, %ymm0, %ymm3
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} ymm0 = ymm0[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm1 = [4,5,4,5,5,7,4,5]
+; AVX512F-FAST-NEXT:    vpermd %ymm0, %ymm1, %ymm0
+; AVX512F-FAST-NEXT:    vpand {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm3, %zmm16
+; AVX512F-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm9[0],xmm13[0],xmm9[1],xmm13[1],xmm9[2],xmm13[2],xmm9[3],xmm13[3],xmm9[4],xmm13[4],xmm9[5],xmm13[5],xmm9[6],xmm13[6],xmm9[7],xmm13[7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm9 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm6, %xmm6
+; AVX512F-FAST-NEXT:    vpshufb %xmm9, %xmm0, %xmm0
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29,28,29,30,128,28,128,30,31,30,31,128,29,128,31,28,29]
+; AVX512F-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm17, %ymm3
+; AVX512F-FAST-NEXT:    vpshufb %ymm9, %ymm3, %ymm15
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm17 = ymm15[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm25, %ymm2
+; AVX512F-FAST-NEXT:    vpshufb %ymm2, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm18 = ymm13[2,3,2,3]
+; AVX512F-FAST-NEXT:    vinserti32x4 $2, %xmm31, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqu64 %zmm0, {{[-0-9]+}}(%r{{[sb]}}p) # 64-byte Spill
+; AVX512F-FAST-NEXT:    vpunpcklbw {{.*#+}} xmm0 = xmm8[0],xmm14[0],xmm8[1],xmm14[1],xmm8[2],xmm14[2],xmm8[3],xmm14[3],xmm8[4],xmm14[4],xmm8[5],xmm14[5],xmm8[6],xmm14[6],xmm8[7],xmm14[7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm8 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
+; AVX512F-FAST-NEXT:    vpshufb %xmm8, %xmm7, %xmm7
+; AVX512F-FAST-NEXT:    vpshufb %xmm8, %xmm0, %xmm13
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm8 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm0[23],zero,ymm0[23,24,25,26],zero,ymm0[24],zero,ymm0[30,31]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm0, %ymm25
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm8[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqa %ymm12, %ymm1
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm12 = ymm12[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,ymm12[30],zero,ymm12[28],zero,zero,zero,zero,ymm12[31],zero,ymm12[29],zero,zero,zero
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm5 = ymm5[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm0 = ymm23[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm8 = ymm11[2,3,2,3]
+; AVX512F-FAST-NEXT:    vinserti32x4 $2, {{[-0-9]+}}(%r{{[sb]}}p), %zmm13, %zmm31 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23,18,19,20,21,128,19,128,21,20,21,22,128,20,128,22,23]
+; AVX512F-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm3, %ymm14
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm30, %ymm3
+; AVX512F-FAST-NEXT:    vpshufb %ymm3, %ymm1, %ymm13
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm2, %ymm1
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpor %ymm12, %ymm9, %ymm9
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm9, %zmm6
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm9 = [18374966859431673855,18446463693966278655,18374966859431673855,18446463693966278655]
+; AVX512F-FAST-NEXT:    # ymm9 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpternlogq $248, %ymm9, %ymm5, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm7, %zmm0, %zmm7
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm0 = [255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm6, %zmm0, %zmm7
+; AVX512F-FAST-NEXT:    vpor %ymm4, %ymm10, %ymm4
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm2, %zmm4
+; AVX512F-FAST-NEXT:    vpor %ymm8, %ymm14, %ymm2
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm3 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm3, %zmm5
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm4, %zmm0, %zmm5
+; AVX512F-FAST-NEXT:    vpandq %ymm9, %ymm22, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm20, %zmm0
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vporq %zmm2, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vpandq %ymm26, %ymm19, %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm17, %zmm2, %zmm2
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vporq %zmm4, %zmm2, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm2
+; AVX512F-FAST-NEXT:    vpandq %ymm26, %ymm18, %ymm0
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm0, %zmm15, %zmm0
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm4 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm4 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vporq %zmm4, %zmm0, %zmm0
+; AVX512F-FAST-NEXT:    vmovdqa64 {{.*#+}} zmm4 = [255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255,255,255,255,255,0,0,255]
+; AVX512F-FAST-NEXT:    vpternlogq $184, %zmm2, %zmm4, %zmm0
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} zmm2 = zmm28[0,1,0,1,4,5,4,5]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} zmm8 = zmm27[0,1,0,1,4,5,4,5]
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm2, %zmm4, %zmm8
+; AVX512F-FAST-NEXT:    vpandq %ymm26, %ymm13, %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm1, %zmm1
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vporq %zmm2, %zmm1, %zmm1
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm2 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm6 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm6 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vporq %zmm2, %zmm6, %zmm9
+; AVX512F-FAST-NEXT:    vpternlogq $226, %zmm1, %zmm4, %zmm9
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpunpcklbw {{[-0-9]+}}(%r{{[sb]}}p), %xmm1, %xmm1 # 16-byte Folded Reload
+; AVX512F-FAST-NEXT:    # xmm1 = xmm1[0],mem[0],xmm1[1],mem[1],xmm1[2],mem[2],xmm1[3],mem[3],xmm1[4],mem[4],xmm1[5],mem[5],xmm1[6],mem[6],xmm1[7],mem[7]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} xmm1 = xmm1[u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u]
+; AVX512F-FAST-NEXT:    vshufi64x2 $0, {{[-0-9]+}}(%r{{[sb]}}p), %zmm1, %zmm18 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm18 = zmm1[0,1,0,1],mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa64 %xmm29, %xmm3
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm2 = xmm3[1,1,0,0,4,5,6,7]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm4 = [0,1,0,1,2,0,0,1]
+; AVX512F-FAST-NEXT:    vpermd %ymm2, %ymm4, %ymm19
+; AVX512F-FAST-NEXT:    vmovdqa {{[-0-9]+}}(%r{{[sb]}}p), %xmm1 # 16-byte Reload
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} xmm6 = xmm1[1,1,0,0,4,5,6,7]
+; AVX512F-FAST-NEXT:    vpermd %ymm6, %ymm4, %ymm17
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} xmm6 = [4,5,4,5,4,5,8,9,6,7,6,7,6,7,6,7]
+; AVX512F-FAST-NEXT:    vpshufb %xmm6, %xmm3, %xmm10
+; AVX512F-FAST-NEXT:    vpshufb %xmm6, %xmm1, %xmm6
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm11 = [128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22]
+; AVX512F-FAST-NEXT:    # ymm11 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm12
+; AVX512F-FAST-NEXT:    vmovdqu (%rsp), %ymm1 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm13 = ymm1[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,ymm1[25],zero,ymm1[23],zero,zero,zero,zero,ymm1[26],zero,ymm1[24],zero,zero
+; AVX512F-FAST-NEXT:    vpshufb %ymm11, %ymm1, %ymm11
+; AVX512F-FAST-NEXT:    vbroadcasti128 {{.*#+}} ymm14 = [20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128,20,128,18,128,20,21,20,21,128,19,128,19,20,21,22,128]
+; AVX512F-FAST-NEXT:    # ymm14 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vmovdqa64 %ymm25, %ymm1
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm1, %ymm15
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm2 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm1 = ymm2[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,ymm2[23],zero,ymm2[23,24,25,26],zero,ymm2[24],zero,ymm2[30,31]
+; AVX512F-FAST-NEXT:    vpshufb %ymm14, %ymm2, %ymm14
+; AVX512F-FAST-NEXT:    vmovdqu {{[-0-9]+}}(%r{{[sb]}}p), %ymm3 # 32-byte Reload
+; AVX512F-FAST-NEXT:    vpshuflw {{.*#+}} ymm4 = ymm3[2,1,1,2,4,5,6,7,10,9,9,10,12,13,14,15]
+; AVX512F-FAST-NEXT:    vmovdqa {{.*#+}} ymm2 = [4,5,4,5,5,7,4,5]
+; AVX512F-FAST-NEXT:    vpermd %ymm4, %ymm2, %ymm20
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm2 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,28,29,26,27,28,29,30,31,30,31,28,29,28,29,30,31]
+; AVX512F-FAST-NEXT:    vpshufb {{.*#+}} ymm4 = ymm3[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,22,23,26,27,24,25,22,23,24,25,26,27,26,27,24,25]
+; AVX512F-FAST-NEXT:    vpermq $238, {{[-0-9]+}}(%r{{[sb]}}p), %zmm22 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm22 = mem[2,3,2,3,6,7,6,7]
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm22
+; AVX512F-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm0 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    # ymm0 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %ymm23 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    # ymm23 = mem[0,1,0,1]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm2 = ymm2[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[0,0,1,0]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm12 = ymm12[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm15 = ymm15[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,0,1,0]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm13 = ymm13[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm1 = ymm1[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} ymm4 = ymm4[2,3,2,3]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm0, %zmm0 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm23, %zmm23 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm23
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm0 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm24
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm23, %zmm24
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm19, %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $228, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %zmm21
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm7, %zmm21
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, {{[-0-9]+}}(%r{{[sb]}}p), %zmm10, %zmm0 # 32-byte Folded Reload
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm0
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm8, %zmm0
+; AVX512F-FAST-NEXT:    vpor %ymm12, %ymm15, %ymm2
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm2, %zmm0, %zmm2
+; AVX512F-FAST-NEXT:    vmovdqu64 {{[-0-9]+}}(%r{{[sb]}}p), %zmm7 # 64-byte Reload
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm2 = zmm7[0,1,2,3],zmm2[4,5,6,7]
+; AVX512F-FAST-NEXT:    vpternlogq $248, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm16
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm16
+; AVX512F-FAST-NEXT:    vpermq $68, {{[-0-9]+}}(%r{{[sb]}}p), %zmm2 # 64-byte Folded Reload
+; AVX512F-FAST-NEXT:    # zmm2 = mem[0,1,0,1,4,5,4,5]
+; AVX512F-FAST-NEXT:    vpermq {{.*#+}} zmm5 = zmm31[0,1,0,1,4,5,4,5]
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm2, %zmm5
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm6, %zmm17, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm18, %zmm2
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm5, %zmm2
+; AVX512F-FAST-NEXT:    vpor %ymm1, %ymm13, %ymm1
+; AVX512F-FAST-NEXT:    vpor %ymm11, %ymm14, %ymm5
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm1
+; AVX512F-FAST-NEXT:    vshufi64x2 {{.*#+}} zmm1 = zmm5[0,1,2,3],zmm1[4,5,6,7]
+; AVX512F-FAST-NEXT:    vinserti64x4 $1, %ymm4, %zmm20, %zmm4
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm1, %zmm4
+; AVX512F-FAST-NEXT:    vpternlogq $216, {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm9, %zmm4
+; AVX512F-FAST-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm4, 128(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm2, (%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm16, 320(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm0, 256(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm21, 192(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm24, 64(%rax)
+; AVX512F-FAST-NEXT:    vmovdqa64 %zmm22, 384(%rax)
+; AVX512F-FAST-NEXT:    addq $1256, %rsp # imm = 0x4E8
+; AVX512F-FAST-NEXT:    vzeroupper
+; AVX512F-FAST-NEXT:    retq
 ;
-; AVX512BW-ONLY-SLOW-LABEL: store_i8_stride7_vf64:
-; AVX512BW-ONLY-SLOW:       # %bb.0:
-; AVX512BW-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rax), %ymm15
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rax), %ymm2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm9, %ymm15, %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm15, %ymm1, %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r9), %ymm10
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm22 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm22, %ymm10, %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r8), %ymm11
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm11, %ymm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r9), %xmm4
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%r8), %xmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm6 = xmm6[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm20 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm16
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm16, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm17
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm17, %ymm5
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm3, %ymm5, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rcx), %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm7, %xmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm21
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm18
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm26 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm18, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm19
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm27 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm19, %ymm7
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rdi), %xmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa (%rsi), %xmm8
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm24 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm13 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm13, %xmm24, %xmm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm24, %zmm3
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm3 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm3 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %ymm2, %ymm21, %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm20, %zmm9, %zmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm22, %ymm20, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm22
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm22, %ymm23
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm24, %ymm23, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm23, %ymm20, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[20],zero,ymm22[18],zero,ymm22[20,21,20,21],zero,ymm22[19],zero,ymm22[19,20,21,22],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm29[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm29, %ymm28, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm24, %zmm28
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $145249953336295682, %r10 # imm = 0x204081020408102
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm9, %zmm28 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm29
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm30
-; AVX512BW-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm30[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,1,1,4,4,5,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm24 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $676341840, %r10d # imm = 0x28502850
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %r10d, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm24, %ymm29, %ymm9 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm30, %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm29, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm26, %ymm27, %ymm26
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm26, %zmm9
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm31
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm14, %ymm31, %ymm14
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm0, %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm14, %ymm25, %ymm14
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm26, %ymm0, %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[18],zero,ymm31[18,19,20,21],zero,ymm31[19],zero,ymm31[25,26,27,22],zero,ymm31[20],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm27, %ymm25, %ymm25
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm25, %zmm14, %zmm14
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k2
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm14, %zmm9 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm9 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm30[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm25 = [13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14]
-; AVX512BW-ONLY-SLOW-NEXT:    movl $338170920, %r10d # imm = 0x14281428
-; AVX512BW-ONLY-SLOW-NEXT:    kmovd %r10d, %k4
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm29, %ymm14 {%k4}
-; AVX512BW-ONLY-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm28 = [25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
-; AVX512BW-ONLY-SLOW-NEXT:    # ymm28 = mem[0,1,2,3,0,1,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm28, %ymm29, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm29, %ymm30, %ymm30
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm27, %zmm14
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[24,25],zero,ymm31[23],zero,ymm31[21,22,23,26],zero,ymm31[24],zero,ymm31[28,29,26,27]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm30 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm30
-; AVX512BW-ONLY-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm27 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm31 = ymm31[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm31 = ymm31[0,2,3,3,4,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm31, %ymm0 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r8), %zmm30
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $1742999440035548184, %r10 # imm = 0x183060C183060C18
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm14 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%r9), %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm0[4,5,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm22[23],zero,zmm22[23,24,25,26],zero,zmm22[24],zero,zmm22[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm22[59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm20 = zmm20[0,1,2,3],zmm30[4,5,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} zmm20 = zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm20[25],zero,zmm20[23],zero,zero,zero,zero,zmm20[26],zero,zmm20[24],zero,zero,zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm20[59],zero,zero,zero,zero,zmm20[62],zero,zmm20[60],zero,zero,zero,zero,zmm20[63],zero,zmm20[61]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %zmm22, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rax), %zmm22
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k3
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k5}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshuflw {{.*#+}} ymm20 = ymm18[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm20 = ymm20[0,0,1,1,4,4,5,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm24, %ymm19, %ymm20 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm28, %ymm19, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm29, %ymm18, %ymm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %ymm24, %ymm28, %ymm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm28
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rcx), %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm26, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 (%rdx), %zmm24
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm24
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} zmm24 = zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm24[18,19,20,21],zero,zmm24[19],zero,zmm24[25,26,27,22],zero,zmm24[20],zero,zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm24[55],zero,zmm24[53,54,55,58],zero,zmm24[56],zero,zmm24[60,61,58,59]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %zmm24, %zmm20, %zmm20
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r9), %xmm24
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm20 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%r8), %xmm26
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %zmm23, %zmm0, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdx), %xmm23
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm30, %zmm28
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm28[18],zero,zmm28[20,21,20,21],zero,zmm28[19],zero,zmm28[19,20,21,22],zero,zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm28[55],zero,zmm28[55,56,57,58],zero,zmm28[56],zero,zmm28[62,63]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %zmm28, %zmm0, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rcx), %xmm28
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm22, %zmm15
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm15, %zmm21, %zmm21
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm0 {%k5}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 32(%rdi), %xmm21
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm20 {%k5}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm18 = ymm18[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm29 = ymm18[2,2,3,3,6,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm25, %ymm19, %ymm29 {%k4}
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm21[0],xmm0[0],xmm21[1],xmm0[1],xmm21[2],xmm0[2],xmm21[3],xmm0[3],xmm21[4],xmm0[4],xmm21[5],xmm0[5],xmm21[6],xmm0[6],xmm21[7],xmm0[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm18 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm18, %xmm19, %xmm19
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm19 = zmm29[2,3,2,3],zmm19[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm25 = xmm23[0],xmm28[0],xmm23[1],xmm28[1],xmm23[2],xmm28[2],xmm23[3],xmm28[3],xmm23[4],xmm28[4],xmm23[5],xmm28[5],xmm23[6],xmm28[6],xmm23[7],xmm28[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %ymm27, %ymm17, %ymm27
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm17 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm17, %xmm25, %xmm25
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufhw {{.*#+}} ymm16 = ymm16[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufd {{.*#+}} ymm16 = ymm16[0,2,3,3,4,6,7,7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %ymm16, %ymm27 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm16 = zmm27[2,3,2,3],zmm25[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm16 {%k2}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm10[27],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %ymm10, %ymm11, %ymm11
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm26[0],xmm24[0],xmm26[1],xmm24[1],xmm26[2],xmm24[2],xmm26[3],xmm24[3],xmm26[4],xmm24[4],xmm26[5],xmm24[5],xmm26[6],xmm24[6],xmm26[7],xmm24[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm10 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm10, %xmm19, %xmm19
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm19[0,1,0,1]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm11, %zmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm19 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm22, %zmm19, %zmm19
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $580999813345182728, %rax # imm = 0x810204081020408
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm11 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm11, %zmm16 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm19, %xmm28, %xmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm25 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm25, %xmm23, %xmm27
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm11, %xmm27, %xmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm23 = xmm28[8],xmm23[8],xmm28[9],xmm23[9],xmm28[10],xmm23[10],xmm28[11],xmm23[11],xmm28[12],xmm23[12],xmm28[13],xmm23[13],xmm28[14],xmm23[14],xmm28[15],xmm23[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm23, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm12, %zmm11, %zmm11
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm0, %xmm23
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm27 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm27, %xmm21, %xmm28
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm23, %xmm28, %xmm23
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm21[8],xmm0[9],xmm21[9],xmm0[10],xmm21[10],xmm0[11],xmm21[11],xmm0[12],xmm21[12],xmm0[13],xmm21[13],xmm0[14],xmm21[14],xmm0[15],xmm21[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm21 = zmm11[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm23, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm11 = zmm0[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm11 {%k3}
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm24, %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm21
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa {{.*#+}} xmm2 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm2, %xmm26, %xmm22
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm13, %xmm22, %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm26[8],xmm24[8],xmm26[9],xmm24[9],xmm26[10],xmm24[10],xmm26[11],xmm24[11],xmm26[12],xmm24[12],xmm26[13],xmm24[13],xmm26[14],xmm24[14],xmm26[15],xmm24[15]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb {{.*#+}} xmm22 = xmm22[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm22, %zmm13, %zmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm13 = zmm13[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $290499906672591364, %rax # imm = 0x408102040810204
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm13 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm11 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm19, %xmm6, %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm25, %xmm5, %xmm19
-; AVX512BW-ONLY-SLOW-NEXT:    vporq %xmm13, %xmm19, %xmm13
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm17, %xmm5, %xmm5
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm5, %zmm5
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm12, %xmm8, %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm27, %xmm7, %xmm12
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm18, %xmm7, %xmm7
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm6, %zmm7, %zmm6
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm5 = zmm5[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm6 = zmm6[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm5, %zmm6 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm2, %xmm1, %xmm2
-; AVX512BW-ONLY-SLOW-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3],xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
-; AVX512BW-ONLY-SLOW-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm0
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
-; AVX512BW-ONLY-SLOW-NEXT:    vpermw %zmm15, %zmm1, %zmm1
-; AVX512BW-ONLY-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[0,1,0,1,4,5,4,5]
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
-; AVX512BW-ONLY-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm6 {%k1}
-; AVX512BW-ONLY-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm9, 320(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm11, 256(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm16, 192(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm3, 64(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vmovdqa64 %zmm14, 384(%rax)
-; AVX512BW-ONLY-SLOW-NEXT:    vzeroupper
-; AVX512BW-ONLY-SLOW-NEXT:    retq
+; AVX512BW-SLOW-LABEL: store_i8_stride7_vf64:
+; AVX512BW-SLOW:       # %bb.0:
+; AVX512BW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rax), %ymm15
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rax), %ymm2
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm9, %ymm15, %ymm0
+; AVX512BW-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
+; AVX512BW-SLOW-NEXT:    # ymm1 = mem[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm15, %ymm1, %ymm1
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm3
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r9), %ymm10
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm22 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm22, %ymm10, %ymm0
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r8), %ymm11
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm11, %ymm1
+; AVX512BW-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm5
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r9), %xmm4
+; AVX512BW-SLOW-NEXT:    vmovdqa (%r8), %xmm1
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} xmm6 = xmm6[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm20
+; AVX512BW-SLOW-NEXT:    movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm20 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm16
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm14, %ymm16, %ymm3
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm17
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm17, %ymm5
+; AVX512BW-SLOW-NEXT:    vpor %ymm3, %ymm5, %ymm3
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rcx), %xmm6
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm7, %xmm7
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm21
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm18
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm26 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm18, %ymm3
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm19
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm27 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm19, %ymm7
+; AVX512BW-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rdi), %xmm7
+; AVX512BW-SLOW-NEXT:    vmovdqa (%rsi), %xmm8
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm24 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm13 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm13, %xmm24, %xmm24
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm24, %zmm3
+; AVX512BW-SLOW-NEXT:    movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm3 {%k1}
+; AVX512BW-SLOW-NEXT:    movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm3 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
+; AVX512BW-SLOW-NEXT:    vpermw %ymm2, %ymm21, %ymm20
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm20, %zmm9, %zmm9
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm20
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm22, %ymm20, %ymm24
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm22
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm22, %ymm23
+; AVX512BW-SLOW-NEXT:    vporq %ymm24, %ymm23, %ymm24
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm23, %ymm20, %ymm28
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[20],zero,ymm22[18],zero,ymm22[20,21,20,21],zero,ymm22[19],zero,ymm22[19,20,21,22],zero
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm29[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm29, %ymm28, %ymm28
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm24, %zmm28
+; AVX512BW-SLOW-NEXT:    movabsq $145249953336295682, %r10 # imm = 0x204081020408102
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm9, %zmm28 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm29
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm30
+; AVX512BW-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm30[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,1,1,4,4,5,5]
+; AVX512BW-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm24 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
+; AVX512BW-SLOW-NEXT:    movl $676341840, %r10d # imm = 0x28502850
+; AVX512BW-SLOW-NEXT:    kmovd %r10d, %k1
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm24, %ymm29, %ymm9 {%k1}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm30, %ymm26
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm29, %ymm27
+; AVX512BW-SLOW-NEXT:    vporq %ymm26, %ymm27, %ymm26
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm26, %zmm9
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm31
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm14, %ymm31, %ymm14
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm0
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm0, %ymm25
+; AVX512BW-SLOW-NEXT:    vporq %ymm14, %ymm25, %ymm14
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm26, %ymm0, %ymm25
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[18],zero,ymm31[18,19,20,21],zero,ymm31[19],zero,ymm31[25,26,27,22],zero,ymm31[20],zero
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm27, %ymm25, %ymm25
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm25, %zmm14, %zmm14
+; AVX512BW-SLOW-NEXT:    movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k2
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm14, %zmm9 {%k2}
+; AVX512BW-SLOW-NEXT:    movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm9 {%k3}
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm30[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm25 = [13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14]
+; AVX512BW-SLOW-NEXT:    movl $338170920, %r10d # imm = 0x14281428
+; AVX512BW-SLOW-NEXT:    kmovd %r10d, %k4
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm29, %ymm14 {%k4}
+; AVX512BW-SLOW-NEXT:    vbroadcasti32x4 {{.*#+}} ymm28 = [25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
+; AVX512BW-SLOW-NEXT:    # ymm28 = mem[0,1,2,3,0,1,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm28, %ymm29, %ymm27
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm29, %ymm30, %ymm30
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm27
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm27, %zmm14
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[24,25],zero,ymm31[23],zero,ymm31[21,22,23,26],zero,ymm31[24],zero,ymm31[28,29,26,27]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm30 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm30
+; AVX512BW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm27 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm0
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm31 = ymm31[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm31 = ymm31[0,2,3,3,4,6,7,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm31, %ymm0 {%k1}
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm30
+; AVX512BW-SLOW-NEXT:    movabsq $1742999440035548184, %r10 # imm = 0x183060C183060C18
+; AVX512BW-SLOW-NEXT:    kmovq %r10, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm14 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%r9), %zmm0
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm0[4,5,6,7]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm22[23],zero,zmm22[23,24,25,26],zero,zmm22[24],zero,zmm22[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm22[59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm20 = zmm20[0,1,2,3],zmm30[4,5,6,7]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} zmm20 = zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm20[25],zero,zmm20[23],zero,zero,zero,zero,zmm20[26],zero,zmm20[24],zero,zero,zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm20[59],zero,zero,zero,zero,zmm20[62],zero,zmm20[60],zero,zero,zero,zero,zmm20[63],zero,zmm20[61]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vporq %zmm22, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rax), %zmm22
+; AVX512BW-SLOW-NEXT:    movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k3
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
+; AVX512BW-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm20
+; AVX512BW-SLOW-NEXT:    movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k5
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k5}
+; AVX512BW-SLOW-NEXT:    vpshuflw {{.*#+}} ymm20 = ymm18[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm20 = ymm20[0,0,1,1,4,4,5,5]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm24, %ymm19, %ymm20 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm28, %ymm19, %ymm24
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm29, %ymm18, %ymm28
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vporq %ymm24, %ymm28, %ymm24
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm28
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rcx), %zmm20
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm26, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 (%rdx), %zmm24
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm24
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} zmm24 = zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm24[18,19,20,21],zero,zmm24[19],zero,zmm24[25,26,27,22],zero,zmm24[20],zero,zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm24[55],zero,zmm24[53,54,55,58],zero,zmm24[56],zero,zmm24[60,61,58,59]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vporq %zmm24, %zmm20, %zmm20
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r9), %xmm24
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm20 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%r8), %xmm26
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
+; AVX512BW-SLOW-NEXT:    vpshufb %zmm23, %zmm0, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdx), %xmm23
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm30, %zmm28
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm28[18],zero,zmm28[20,21,20,21],zero,zmm28[19],zero,zmm28[19,20,21,22],zero,zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm28[55],zero,zmm28[55,56,57,58],zero,zmm28[56],zero,zmm28[62,63]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
+; AVX512BW-SLOW-NEXT:    vporq %zmm28, %zmm0, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rcx), %xmm28
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm22, %zmm15
+; AVX512BW-SLOW-NEXT:    vpermw %zmm15, %zmm21, %zmm21
+; AVX512BW-SLOW-NEXT:    movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k5
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm0 {%k5}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 32(%rdi), %xmm21
+; AVX512BW-SLOW-NEXT:    movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k5
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm20 {%k5}
+; AVX512BW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm0
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm18 = ymm18[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm29 = ymm18[2,2,3,3,6,6,7,7]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm25, %ymm19, %ymm29 {%k4}
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm21[0],xmm0[0],xmm21[1],xmm0[1],xmm21[2],xmm0[2],xmm21[3],xmm0[3],xmm21[4],xmm0[4],xmm21[5],xmm0[5],xmm21[6],xmm0[6],xmm21[7],xmm0[7]
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm18 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm18, %xmm19, %xmm19
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm19 = zmm29[2,3,2,3],zmm19[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm25 = xmm23[0],xmm28[0],xmm23[1],xmm28[1],xmm23[2],xmm28[2],xmm23[3],xmm28[3],xmm23[4],xmm28[4],xmm23[5],xmm28[5],xmm23[6],xmm28[6],xmm23[7],xmm28[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %ymm27, %ymm17, %ymm27
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm17 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm17, %xmm25, %xmm25
+; AVX512BW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm16 = ymm16[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
+; AVX512BW-SLOW-NEXT:    vpshufd {{.*#+}} ymm16 = ymm16[0,2,3,3,4,6,7,7]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %ymm16, %ymm27 {%k1}
+; AVX512BW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm16 = zmm27[2,3,2,3],zmm25[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm16 {%k2}
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm10[27],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
+; AVX512BW-SLOW-NEXT:    vpor %ymm10, %ymm11, %ymm11
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm26[0],xmm24[0],xmm26[1],xmm24[1],xmm26[2],xmm24[2],xmm26[3],xmm24[3],xmm26[4],xmm24[4],xmm26[5],xmm24[5],xmm26[6],xmm24[6],xmm26[7],xmm24[7]
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm10 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm10, %xmm19, %xmm19
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm19[0,1,0,1]
+; AVX512BW-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm11, %zmm11
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm19 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
+; AVX512BW-SLOW-NEXT:    vpermw %zmm22, %zmm19, %zmm19
+; AVX512BW-SLOW-NEXT:    movabsq $580999813345182728, %rax # imm = 0x810204081020408
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm11 {%k1}
+; AVX512BW-SLOW-NEXT:    movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm11, %zmm16 {%k1}
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm19, %xmm28, %xmm11
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm25 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm25, %xmm23, %xmm27
+; AVX512BW-SLOW-NEXT:    vporq %xmm11, %xmm27, %xmm11
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm23 = xmm28[8],xmm23[8],xmm28[9],xmm23[9],xmm28[10],xmm23[10],xmm28[11],xmm23[11],xmm28[12],xmm23[12],xmm28[13],xmm23[13],xmm28[14],xmm23[14],xmm28[15],xmm23[15]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm23, %xmm12
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm12, %zmm11, %zmm11
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm0, %xmm23
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm27 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm27, %xmm21, %xmm28
+; AVX512BW-SLOW-NEXT:    vporq %xmm23, %xmm28, %xmm23
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm21[8],xmm0[9],xmm21[9],xmm0[10],xmm21[10],xmm0[11],xmm21[11],xmm0[12],xmm21[12],xmm0[13],xmm21[13],xmm0[14],xmm21[14],xmm0[15],xmm21[15]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm21 = zmm11[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm23, %zmm0
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm11 = zmm0[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm11 {%k3}
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm0, %xmm24, %xmm13
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
+; AVX512BW-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm21
+; AVX512BW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm2 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm2, %xmm26, %xmm22
+; AVX512BW-SLOW-NEXT:    vporq %xmm13, %xmm22, %xmm13
+; AVX512BW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm26[8],xmm24[8],xmm26[9],xmm24[9],xmm26[10],xmm24[10],xmm26[11],xmm24[11],xmm26[12],xmm24[12],xmm26[13],xmm24[13],xmm26[14],xmm24[14],xmm26[15],xmm24[15]
+; AVX512BW-SLOW-NEXT:    vpshufb {{.*#+}} xmm22 = xmm22[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm22, %zmm13, %zmm13
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm13 = zmm13[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    movabsq $290499906672591364, %rax # imm = 0x408102040810204
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm13 {%k1}
+; AVX512BW-SLOW-NEXT:    movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm11 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm19, %xmm6, %xmm13
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm25, %xmm5, %xmm19
+; AVX512BW-SLOW-NEXT:    vporq %xmm13, %xmm19, %xmm13
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm17, %xmm5, %xmm5
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm5, %zmm5
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm12, %xmm8, %xmm6
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm27, %xmm7, %xmm12
+; AVX512BW-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm18, %xmm7, %xmm7
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm6, %zmm7, %zmm6
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm5 = zmm5[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm6 = zmm6[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm5, %zmm6 {%k1}
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm2, %xmm1, %xmm2
+; AVX512BW-SLOW-NEXT:    vpor %xmm0, %xmm2, %xmm0
+; AVX512BW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3],xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
+; AVX512BW-SLOW-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
+; AVX512BW-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm0
+; AVX512BW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
+; AVX512BW-SLOW-NEXT:    vpermw %zmm15, %zmm1, %zmm1
+; AVX512BW-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[0,1,0,1,4,5,4,5]
+; AVX512BW-SLOW-NEXT:    movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
+; AVX512BW-SLOW-NEXT:    movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
+; AVX512BW-SLOW-NEXT:    kmovq %rax, %k1
+; AVX512BW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm6 {%k1}
+; AVX512BW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm9, 320(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm11, 256(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm16, 192(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm3, 64(%rax)
+; AVX512BW-SLOW-NEXT:    vmovdqa64 %zmm14, 384(%rax)
+; AVX512BW-SLOW-NEXT:    vzeroupper
+; AVX512BW-SLOW-NEXT:    retq
 ;
 ; AVX512BW-FAST-LABEL: store_i8_stride7_vf64:
 ; AVX512BW-FAST:       # %bb.0:
@@ -9319,320 +8727,6 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512BW-FAST-NEXT:    addq $200, %rsp
 ; AVX512BW-FAST-NEXT:    vzeroupper
 ; AVX512BW-FAST-NEXT:    retq
-;
-; AVX512DQBW-SLOW-LABEL: store_i8_stride7_vf64:
-; AVX512DQBW-SLOW:       # %bb.0:
-; AVX512DQBW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rax), %ymm15
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rax), %ymm2
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm9 = [12,13,2,3,12,13,0,1,14,15,2,3,0,1,14,15,28,29,18,19,28,29,16,17,30,31,18,19,16,17,30,31]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm9, %ymm15, %ymm0
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti128 {{.*#+}} ymm1 = [4,5,4,5,5,6,5,6,4,5,4,5,5,6,5,6]
-; AVX512DQBW-SLOW-NEXT:    # ymm1 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm15, %ymm1, %ymm1
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r9), %ymm10
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm22 = [13,128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm22, %ymm10, %ymm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r8), %ymm11
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm23 = [128,128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm11, %ymm1
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm0, %ymm1, %ymm5
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r9), %xmm4
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%r8), %xmm1
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm6 = xmm1[8],xmm4[8],xmm1[9],xmm4[9],xmm1[10],xmm4[10],xmm1[11],xmm4[11],xmm1[12],xmm4[12],xmm1[13],xmm4[13],xmm1[14],xmm4[14],xmm1[15],xmm4[15]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} xmm6 = xmm6[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm6 = ymm6[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm5, %zmm6, %zmm20
-; AVX512DQBW-SLOW-NEXT:    movabsq $2323999253380730912, %r10 # imm = 0x2040810204081020
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm3, %zmm20 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdx), %ymm16
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} ymm14 = [0,1,0,1,14,128,14,15,0,1,14,15,128,13,14,15,16,17,16,128,30,31,30,31,16,17,128,31,28,29,30,31]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm14, %ymm16, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rcx), %ymm17
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm25 = [128,128,128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm17, %ymm5
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm3, %ymm5, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdx), %xmm5
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rcx), %xmm6
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm7 = xmm6[8],xmm5[8],xmm6[9],xmm5[9],xmm6[10],xmm5[10],xmm6[11],xmm5[11],xmm6[12],xmm5[12],xmm6[13],xmm5[13],xmm6[14],xmm5[14],xmm6[15],xmm5[15]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <6,3,2,u,u,u,9,8,5,4,u,u,u,11,10,7>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm7, %xmm7
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm7 = ymm7[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm7, %zmm21
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdi), %ymm18
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm26 = [0,1,14,128,12,13,0,1,14,15,128,3,12,13,2,3,16,128,30,31,28,29,16,17,128,31,18,19,28,29,18,128]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm18, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rsi), %ymm19
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm27 = [128,128,128,14,128,128,128,128,128,128,15,128,128,128,128,128,128,16,128,128,128,128,128,128,17,128,128,128,128,128,128,18]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm19, %ymm7
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm3, %ymm7, %ymm3
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rdi), %xmm7
-; AVX512DQBW-SLOW-NEXT:    vmovdqa (%rsi), %xmm8
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm24 = xmm8[8],xmm7[8],xmm8[9],xmm7[9],xmm8[10],xmm7[10],xmm8[11],xmm7[11],xmm8[12],xmm7[12],xmm8[13],xmm7[13],xmm8[14],xmm7[14],xmm8[15],xmm7[15]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm13 = <2,u,u,u,9,8,5,4,u,u,u,11,10,7,6,u>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm13, %xmm24, %xmm24
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm3, %zmm24, %zmm3
-; AVX512DQBW-SLOW-NEXT:    movabsq $435749860008887046, %r10 # imm = 0x60C183060C18306
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm3 {%k1}
-; AVX512DQBW-SLOW-NEXT:    movabsq $4066998693416279096, %r10 # imm = 0x3870E1C3870E1C38
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm3 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [10,9,9,10,10,9,9,10,9,10,14,15,10,9,9,10,27,29,28,27,28,29,29,28,27,29,28,27,28,29,29,28]
-; AVX512DQBW-SLOW-NEXT:    vpermw %ymm2, %ymm21, %ymm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm9, %ymm2, %ymm9
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm20, %zmm9, %zmm9
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r9), %ymm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm22, %ymm20, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r8), %ymm22
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm22, %ymm23
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm24, %ymm23, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm23 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,20,128,18,128,128,128,128,21,128,19,128,128,128,128,22,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,128,128,25,128,23,128,128,128,128,26,128,24,128,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm23, %ymm20, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm29 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm22[20],zero,ymm22[18],zero,ymm22[20,21,20,21],zero,ymm22[19],zero,ymm22[19,20,21,22],zero
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm29 = ymm29[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm29, %ymm28, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm28, %zmm24, %zmm28
-; AVX512DQBW-SLOW-NEXT:    movabsq $145249953336295682, %r10 # imm = 0x204081020408102
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm9, %zmm28 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rsi), %ymm29
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdi), %ymm30
-; AVX512DQBW-SLOW-NEXT:    vpshuflw {{.*#+}} ymm9 = ymm30[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm9 = ymm9[0,0,1,1,4,4,5,5]
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm24 = [5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6,5,4,3,6]
-; AVX512DQBW-SLOW-NEXT:    movl $676341840, %r10d # imm = 0x28502850
-; AVX512DQBW-SLOW-NEXT:    kmovd %r10d, %k1
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm24, %ymm29, %ymm9 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm9 = ymm9[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm30, %ymm26
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm29, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm26, %ymm27, %ymm26
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm9, %zmm26, %zmm9
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdx), %ymm31
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm14, %ymm31, %ymm14
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rcx), %ymm0
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm0, %ymm25
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm14, %ymm25, %ymm14
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm26 = <u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,18,128,128,128,128,21,128,19,128,128,128,128,22,128,20,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,128,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm26, %ymm0, %ymm25
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm25 = ymm25[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[18],zero,ymm31[18,19,20,21],zero,ymm31[19],zero,ymm31[25,26,27,22],zero,ymm31[20],zero
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm27, %ymm25, %ymm25
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm25, %zmm14, %zmm14
-; AVX512DQBW-SLOW-NEXT:    movabsq $3485998880071096368, %r10 # imm = 0x3060C183060C1830
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k2
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm14, %zmm9 {%k2}
-; AVX512DQBW-SLOW-NEXT:    movabsq $-4357498600088870461, %r10 # imm = 0xC3870E1C3870E1C3
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm9 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm14 = ymm30[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm14 = ymm14[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastd {{.*#+}} ymm25 = [13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14,13,12,15,14]
-; AVX512DQBW-SLOW-NEXT:    movl $338170920, %r10d # imm = 0x14281428
-; AVX512DQBW-SLOW-NEXT:    kmovd %r10d, %k4
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm29, %ymm14 {%k4}
-; AVX512DQBW-SLOW-NEXT:    vbroadcasti64x2 {{.*#+}} ymm28 = [25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128]
-; AVX512DQBW-SLOW-NEXT:    # ymm28 = mem[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm28, %ymm29, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} ymm29 = [128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,23,128,128,128,128,26,128,24,128,128,128,128,27,128,25]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm29, %ymm30, %ymm30
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm14 = ymm14[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm14, %zmm27, %zmm14
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm27 = zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,zero,ymm31[24,25],zero,ymm31[23],zero,ymm31[21,22,23,26],zero,ymm31[24],zero,ymm31[28,29,26,27]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm27 = ymm27[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm30 = ymm0[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,ymm0[25],zero,ymm0[23],zero,zero,zero,zero,ymm0[26],zero,ymm0[24],zero,zero,zero,zero
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm30 = ymm30[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm27, %ymm30, %ymm30
-; AVX512DQBW-SLOW-NEXT:    vpbroadcastq {{.*#+}} ymm27 = [11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12,11,0,0,0,15,14,13,12]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm0, %ymm0
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm31 = ymm31[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm31 = ymm31[0,2,3,3,4,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm31, %ymm0 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm0 = ymm0[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm0, %zmm30, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r8), %zmm30
-; AVX512DQBW-SLOW-NEXT:    movabsq $1742999440035548184, %r10 # imm = 0x183060C183060C18
-; AVX512DQBW-SLOW-NEXT:    kmovq %r10, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm14 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%r9), %zmm0
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm22 = zmm22[0,1,2,3],zmm0[4,5,6,7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} zmm22 = zmm22[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,24,25,24,25],zero,zmm22[23],zero,zmm22[23,24,25,26],zero,zmm22[24],zero,zmm22[30,31,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,61],zero,zmm22[59],zero,zero,zero,zero,zmm22[62],zero,zmm22[60],zero,zero,zero,zero,zmm22[63],zero
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm22 = zmm22[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm20 = zmm20[0,1,2,3],zmm30[4,5,6,7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} zmm20 = zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zero,zero,zero,zmm20[25],zero,zmm20[23],zero,zero,zero,zero,zmm20[26],zero,zmm20[24],zero,zero,zmm20[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,zmm20[59],zero,zero,zero,zero,zmm20[62],zero,zmm20[60],zero,zero,zero,zero,zmm20[63],zero,zmm20[61]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vporq %zmm22, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rax), %zmm22
-; AVX512DQBW-SLOW-NEXT:    movabsq $6971997760142192736, %rax # imm = 0x60C183060C183060
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k3
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm20 = [11,13,12,11,12,13,13,12,11,13,12,11,12,13,13,12,62,61,62,63,63,62,62,63,62,61,62,63,63,62,62,63]
-; AVX512DQBW-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm20
-; AVX512DQBW-SLOW-NEXT:    movabsq $-9150747060186627967, %rax # imm = 0x8102040810204081
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm20, %zmm14 {%k5}
-; AVX512DQBW-SLOW-NEXT:    vpshuflw {{.*#+}} ymm20 = ymm18[1,2,2,3,4,5,6,7,9,10,10,11,12,13,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm20 = ymm20[0,0,1,1,4,4,5,5]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm24, %ymm19, %ymm20 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm28, %ymm19, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm24 = ymm24[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm29, %ymm18, %ymm28
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm28 = ymm28[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vporq %ymm24, %ymm28, %ymm24
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm20 = ymm20[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm24, %zmm20, %zmm28
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rcx), %zmm20
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm17, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm26, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 (%rdx), %zmm24
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm16, %zmm24, %zmm24
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} zmm24 = zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,18],zero,zmm24[18,19,20,21],zero,zmm24[19],zero,zmm24[25,26,27,22],zero,zmm24[20],zero,zmm24[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57],zero,zmm24[55],zero,zmm24[53,54,55,58],zero,zmm24[56],zero,zmm24[60,61,58,59]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm24 = zmm24[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm20 = zmm20[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vporq %zmm24, %zmm20, %zmm20
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r9), %xmm24
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm28, %zmm20 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%r8), %xmm26
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm10, %zmm0, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vpshufb %zmm23, %zmm0, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdx), %xmm23
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm11, %zmm30, %zmm28
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} zmm28 = zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,20],zero,zmm28[18],zero,zmm28[20,21,20,21],zero,zmm28[19],zero,zmm28[19,20,21,22],zero,zmm28[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,56,57,56,57],zero,zmm28[55],zero,zmm28[55,56,57,58],zero,zmm28[56],zero,zmm28[62,63]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm28 = zmm28[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[2,3,2,3,6,7,6,7]
-; AVX512DQBW-SLOW-NEXT:    vporq %zmm28, %zmm0, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rcx), %xmm28
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm15, %zmm22, %zmm15
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm15, %zmm21, %zmm21
-; AVX512DQBW-SLOW-NEXT:    movabsq $1161999626690365456, %rax # imm = 0x1020408102040810
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm0 {%k5}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 32(%rdi), %xmm21
-; AVX512DQBW-SLOW-NEXT:    movabsq $2033499346708139548, %rax # imm = 0x1C3870E1C3870E1C
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k5
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm20 {%k5}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa 32(%rsi), %xmm0
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm18 = ymm18[0,1,2,3,6,7,7,6,8,9,10,11,14,15,15,14]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm29 = ymm18[2,2,3,3,6,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm25, %ymm19, %ymm29 {%k4}
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm21[0],xmm0[0],xmm21[1],xmm0[1],xmm21[2],xmm0[2],xmm21[3],xmm0[3],xmm21[4],xmm0[4],xmm21[5],xmm0[5],xmm21[6],xmm0[6],xmm21[7],xmm0[7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm18 = <0,1,u,u,u,6,7,2,3,u,u,u,8,9,4,5>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm18, %xmm19, %xmm19
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm19 = zmm29[2,3,2,3],zmm19[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm25 = xmm23[0],xmm28[0],xmm23[1],xmm28[1],xmm23[2],xmm28[2],xmm23[3],xmm28[3],xmm23[4],xmm28[4],xmm23[5],xmm28[5],xmm23[6],xmm28[6],xmm23[7],xmm28[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %ymm27, %ymm17, %ymm27
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm17 = <4,5,0,1,u,u,u,6,7,2,3,u,u,u,8,9>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm17, %xmm25, %xmm25
-; AVX512DQBW-SLOW-NEXT:    vpshufhw {{.*#+}} ymm16 = ymm16[0,1,2,3,7,6,6,7,8,9,10,11,15,14,14,15]
-; AVX512DQBW-SLOW-NEXT:    vpshufd {{.*#+}} ymm16 = ymm16[0,2,3,3,4,6,7,7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %ymm16, %ymm27 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vshufi64x2 {{.*#+}} zmm16 = zmm27[2,3,2,3],zmm25[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm16 {%k2}
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm10 = ymm10[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,29],zero,ymm10[27],zero,zero,zero,zero,ymm10[30],zero,ymm10[28],zero,zero,zero,zero,ymm10[31],zero
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm10 = ymm10[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} ymm11 = ymm11[u,u,u,u,u,u,u,u,u,u,u,u,u,u,u,u],zero,ymm11[27],zero,zero,zero,zero,ymm11[30],zero,ymm11[28],zero,zero,zero,zero,ymm11[31],zero,ymm11[29]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm11 = ymm11[2,3,2,3]
-; AVX512DQBW-SLOW-NEXT:    vpor %ymm10, %ymm11, %ymm11
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm19 = xmm26[0],xmm24[0],xmm26[1],xmm24[1],xmm26[2],xmm24[2],xmm26[3],xmm24[3],xmm26[4],xmm24[4],xmm26[5],xmm24[5],xmm26[6],xmm24[6],xmm26[7],xmm24[7]
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm10 = <u,u,4,5,0,1,u,u,u,6,7,2,3,u,u,u>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm10, %xmm19, %xmm19
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} ymm19 = ymm19[0,1,0,1]
-; AVX512DQBW-SLOW-NEXT:    vinserti64x4 $1, %ymm19, %zmm11, %zmm11
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm19 = [14,13,14,15,15,14,14,15,14,13,14,15,15,14,14,15,17,17,16,16,17,17,16,16,20,21,17,17,17,17,16,16]
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm22, %zmm19, %zmm19
-; AVX512DQBW-SLOW-NEXT:    movabsq $580999813345182728, %rax # imm = 0x810204081020408
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm19, %zmm11 {%k1}
-; AVX512DQBW-SLOW-NEXT:    movabsq $1016749673354069774, %rax # imm = 0xE1C3870E1C3870E
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm11, %zmm16 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm19 = <u,u,u,128,7,128,5,u,u,u,128,8,128,6,u,u>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm19, %xmm28, %xmm11
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm25 = <u,u,u,7,128,5,128,u,u,u,8,128,6,128,u,u>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm25, %xmm23, %xmm27
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm11, %xmm27, %xmm11
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm23 = xmm28[8],xmm23[8],xmm28[9],xmm23[9],xmm28[10],xmm23[10],xmm28[11],xmm23[11],xmm28[12],xmm23[12],xmm28[13],xmm23[13],xmm28[14],xmm23[14],xmm28[15],xmm23[15]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm23, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm12, %zmm11, %zmm11
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm12 = <u,128,7,128,5,u,u,u,128,8,128,6,u,u,u,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm0, %xmm23
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} xmm27 = <u,7,128,5,128,u,u,u,8,128,6,128,u,u,u,9>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm27, %xmm21, %xmm28
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm23, %xmm28, %xmm23
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm0 = xmm0[8],xmm21[8],xmm0[9],xmm21[9],xmm0[10],xmm21[10],xmm0[11],xmm21[11],xmm0[12],xmm21[12],xmm0[13],xmm21[13],xmm0[14],xmm21[14],xmm0[15],xmm21[15]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm21 = zmm11[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm13, %xmm0, %xmm0
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm23, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm11 = zmm0[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm11 {%k3}
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm0 = <128,4,u,u,u,128,7,128,5,u,u,u,128,8,128,6>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm0, %xmm24, %xmm13
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm21 = [2,2,2,4,2,2,2,4,3,3,3,3,2,2,2,4,52,53,52,53,53,54,53,54,52,53,52,53,53,54,53,54]
-; AVX512DQBW-SLOW-NEXT:    vpermi2w %zmm22, %zmm2, %zmm21
-; AVX512DQBW-SLOW-NEXT:    vmovdqa {{.*#+}} xmm2 = <4,128,u,u,u,7,128,5,128,u,u,u,8,128,6,128>
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm2, %xmm26, %xmm22
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm13, %xmm22, %xmm13
-; AVX512DQBW-SLOW-NEXT:    vpunpckhbw {{.*#+}} xmm22 = xmm26[8],xmm24[8],xmm26[9],xmm24[9],xmm26[10],xmm24[10],xmm26[11],xmm24[11],xmm26[12],xmm24[12],xmm26[13],xmm24[13],xmm26[14],xmm24[14],xmm26[15],xmm24[15]
-; AVX512DQBW-SLOW-NEXT:    vpshufb {{.*#+}} xmm22 = xmm22[u,6,7,2,3,u,u,u,8,9,4,5,u,u,u,10]
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm22, %zmm13, %zmm13
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm13 = zmm13[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    movabsq $290499906672591364, %rax # imm = 0x408102040810204
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm21, %zmm13 {%k1}
-; AVX512DQBW-SLOW-NEXT:    movabsq $-8714997200177740921, %rax # imm = 0x870E1C3870E1C387
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm13, %zmm11 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm19, %xmm6, %xmm13
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm25, %xmm5, %xmm19
-; AVX512DQBW-SLOW-NEXT:    vporq %xmm13, %xmm19, %xmm13
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm5 = xmm5[0],xmm6[0],xmm5[1],xmm6[1],xmm5[2],xmm6[2],xmm5[3],xmm6[3],xmm5[4],xmm6[4],xmm5[5],xmm6[5],xmm5[6],xmm6[6],xmm5[7],xmm6[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm17, %xmm5, %xmm5
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm13, %zmm5, %zmm5
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm12, %xmm8, %xmm6
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm27, %xmm7, %xmm12
-; AVX512DQBW-SLOW-NEXT:    vpor %xmm6, %xmm12, %xmm6
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm7 = xmm7[0],xmm8[0],xmm7[1],xmm8[1],xmm7[2],xmm8[2],xmm7[3],xmm8[3],xmm7[4],xmm8[4],xmm7[5],xmm8[5],xmm7[6],xmm8[6],xmm7[7],xmm8[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm18, %xmm7, %xmm7
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm6, %zmm7, %zmm6
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm5 = zmm5[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm6 = zmm6[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    movabsq $871499720017774092, %rax # imm = 0xC183060C183060C
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm5, %zmm6 {%k1}
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm0, %xmm4, %xmm0
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm2, %xmm1, %xmm2
-; AVX512DQBW-SLOW-NEXT:    vpor %xmm0, %xmm2, %xmm0
-; AVX512DQBW-SLOW-NEXT:    vpunpcklbw {{.*#+}} xmm1 = xmm1[0],xmm4[0],xmm1[1],xmm4[1],xmm1[2],xmm4[2],xmm1[3],xmm4[3],xmm1[4],xmm4[4],xmm1[5],xmm4[5],xmm1[6],xmm4[6],xmm1[7],xmm4[7]
-; AVX512DQBW-SLOW-NEXT:    vpshufb %xmm10, %xmm1, %xmm1
-; AVX512DQBW-SLOW-NEXT:    vinserti32x4 $2, %xmm0, %zmm1, %zmm0
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 {{.*#+}} zmm1 = [1,1,0,0,1,1,0,0,4,5,1,1,1,1,0,0,18,18,18,20,18,18,18,20,19,19,19,19,18,18,18,20]
-; AVX512DQBW-SLOW-NEXT:    vpermw %zmm15, %zmm1, %zmm1
-; AVX512DQBW-SLOW-NEXT:    vpermq {{.*#+}} zmm0 = zmm0[0,1,0,1,4,5,4,5]
-; AVX512DQBW-SLOW-NEXT:    movabsq $4647998506761461824, %rax # imm = 0x4081020408102040
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm1, %zmm0 {%k1}
-; AVX512DQBW-SLOW-NEXT:    movabsq $8133997386832558192, %rax # imm = 0x70E1C3870E1C3870
-; AVX512DQBW-SLOW-NEXT:    kmovq %rax, %k1
-; AVX512DQBW-SLOW-NEXT:    vmovdqu8 %zmm0, %zmm6 {%k1}
-; AVX512DQBW-SLOW-NEXT:    movq {{[0-9]+}}(%rsp), %rax
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm6, (%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm9, 320(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm11, 256(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm16, 192(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm20, 128(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm3, 64(%rax)
-; AVX512DQBW-SLOW-NEXT:    vmovdqa64 %zmm14, 384(%rax)
-; AVX512DQBW-SLOW-NEXT:    vzeroupper
-; AVX512DQBW-SLOW-NEXT:    retq
   %in.vec0 = load <64 x i8>, ptr %in.vecptr0, align 64
   %in.vec1 = load <64 x i8>, ptr %in.vecptr1, align 64
   %in.vec2 = load <64 x i8>, ptr %in.vecptr2, align 64
@@ -9660,10 +8754,16 @@ define void @store_i8_stride7_vf64(ptr %in.vecptr0, ptr %in.vecptr1, ptr %in.vec
 ; AVX512-SLOW: {{.*}}
 ; AVX512BW-ONLY: {{.*}}
 ; AVX512BW-ONLY-FAST: {{.*}}
+; AVX512BW-ONLY-SLOW: {{.*}}
+; AVX512DQ-FAST: {{.*}}
 ; AVX512DQ-ONLY: {{.*}}
+; AVX512DQ-SLOW: {{.*}}
 ; AVX512DQBW-FAST: {{.*}}
 ; AVX512DQBW-ONLY: {{.*}}
+; AVX512DQBW-SLOW: {{.*}}
 ; AVX512F-ONLY: {{.*}}
+; AVX512F-ONLY-FAST: {{.*}}
+; AVX512F-ONLY-SLOW: {{.*}}
 ; FALLBACK0: {{.*}}
 ; FALLBACK1: {{.*}}
 ; FALLBACK10: {{.*}}

diff --git a/llvm/test/CodeGen/X86/vector-lzcnt-512.ll b/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
index 3c5e3adf038faae..efecfa47eb4abb5 100644
--- a/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
+++ b/llvm/test/CodeGen/X86/vector-lzcnt-512.ll
@@ -31,8 +31,8 @@ define <8 x i64> @testv8i64(<8 x i64> %in) nounwind {
 ; AVX512BW-NEXT:    vporq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpbroadcastb {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT:    vpandnq %zmm1, %zmm0, %zmm2
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512BW-NEXT:    # zmm3 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
 ; AVX512BW-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
@@ -110,8 +110,8 @@ define <8 x i64> @testv8i64u(<8 x i64> %in) nounwind {
 ; AVX512BW-NEXT:    vporq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpbroadcastb {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT:    vpandnq %zmm1, %zmm0, %zmm2
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512BW-NEXT:    # zmm3 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
 ; AVX512BW-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
@@ -187,8 +187,8 @@ define <16 x i32> @testv16i32(<16 x i32> %in) nounwind {
 ; AVX512BW-NEXT:    vpord %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpbroadcastb {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT:    vpandnq %zmm1, %zmm0, %zmm2
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512BW-NEXT:    # zmm3 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
 ; AVX512BW-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
@@ -274,8 +274,8 @@ define <16 x i32> @testv16i32u(<16 x i32> %in) nounwind {
 ; AVX512BW-NEXT:    vpord %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpbroadcastb {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT:    vpandnq %zmm1, %zmm0, %zmm2
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512BW-NEXT:    # zmm3 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512BW-NEXT:    # zmm3 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm2, %zmm3, %zmm2
 ; AVX512BW-NEXT:    vpternlogq $15, %zmm0, %zmm0, %zmm0
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm0
@@ -369,8 +369,8 @@ define <32 x i16> @testv32i16(<32 x i16> %in) nounwind {
 ; AVX512BW:       # %bb.0:
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm1
 ; AVX512BW-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm1, %zmm1
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
-; AVX512BW-NEXT:    # zmm2 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm1, %zmm2, %zmm3
 ; AVX512BW-NEXT:    vpshufb %zmm0, %zmm2, %zmm2
 ; AVX512BW-NEXT:    vptestnmb %zmm1, %zmm1, %k0
@@ -455,8 +455,8 @@ define <32 x i16> @testv32i16u(<32 x i16> %in) nounwind {
 ; AVX512BW:       # %bb.0:
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm1
 ; AVX512BW-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm1, %zmm1
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
-; AVX512BW-NEXT:    # zmm2 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm1, %zmm2, %zmm3
 ; AVX512BW-NEXT:    vpshufb %zmm0, %zmm2, %zmm2
 ; AVX512BW-NEXT:    vptestnmb %zmm1, %zmm1, %k0
@@ -561,8 +561,8 @@ define <64 x i8> @testv64i8(<64 x i8> %in) nounwind {
 ; AVX512BW:       # %bb.0:
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm1
 ; AVX512BW-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm1, %zmm1
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
-; AVX512BW-NEXT:    # zmm2 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm1, %zmm2, %zmm3
 ; AVX512BW-NEXT:    vpshufb %zmm0, %zmm2, %zmm0
 ; AVX512BW-NEXT:    vptestnmb %zmm1, %zmm1, %k0
@@ -651,8 +651,8 @@ define <64 x i8> @testv64i8u(<64 x i8> %in) nounwind {
 ; AVX512BW:       # %bb.0:
 ; AVX512BW-NEXT:    vpsrlw $4, %zmm0, %zmm1
 ; AVX512BW-NEXT:    vpandd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm1, %zmm1
-; AVX512BW-NEXT:    vbroadcasti64x2 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
-; AVX512BW-NEXT:    # zmm2 = mem[0,1,0,1,0,1,0,1]
+; AVX512BW-NEXT:    vbroadcasti32x4 {{.*#+}} zmm2 = [4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0,4,3,2,2,1,1,1,1,0,0,0,0,0,0,0,0]
+; AVX512BW-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3,0,1,2,3,0,1,2,3]
 ; AVX512BW-NEXT:    vpshufb %zmm1, %zmm2, %zmm3
 ; AVX512BW-NEXT:    vpshufb %zmm0, %zmm2, %zmm0
 ; AVX512BW-NEXT:    vptestnmb %zmm1, %zmm1, %k0

diff --git a/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast.ll b/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast.ll
index 3ce4bb3306ab0f7..86e878261dccede 100644
--- a/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast.ll
+++ b/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast.ll
@@ -5256,8 +5256,8 @@ define void @vec384_i32_widen_to_i192_factor6_broadcast_to_v2i192_factor2(ptr %i
 ; AVX512DQ-NEXT:    vpaddb 32(%rsi), %ymm1, %ymm1
 ; AVX512DQ-NEXT:    vpaddb (%rsi), %ymm0, %ymm0
 ; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512DQ-NEXT:    vbroadcasti32x8 {{.*#+}} zmm1 = [16,29,30,31,4,5,16,7,16,29,30,31,4,5,16,7]
-; AVX512DQ-NEXT:    # zmm1 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
+; AVX512DQ-NEXT:    vbroadcasti64x4 {{.*#+}} zmm1 = [16,29,30,31,4,5,16,7,16,29,30,31,4,5,16,7]
+; AVX512DQ-NEXT:    # zmm1 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-NEXT:    vpxor %xmm2, %xmm2, %xmm2
 ; AVX512DQ-NEXT:    vpermt2d %zmm0, %zmm1, %zmm2
 ; AVX512DQ-NEXT:    vpaddb (%rdx), %ymm2, %ymm0
@@ -5540,8 +5540,8 @@ define void @vec384_i64_widen_to_i192_factor3_broadcast_to_v2i192_factor2(ptr %i
 ; AVX512DQ-NEXT:    vpaddb (%rsi), %ymm0, %ymm0
 ; AVX512DQ-NEXT:    vinserti64x4 $1, %ymm1, %zmm0, %zmm0
 ; AVX512DQ-NEXT:    vpxor %xmm1, %xmm1, %xmm1
-; AVX512DQ-NEXT:    vbroadcasti32x8 {{.*#+}} zmm2 = [0,7,10,0,0,7,10,0]
-; AVX512DQ-NEXT:    # zmm2 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
+; AVX512DQ-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,7,10,0,0,7,10,0]
+; AVX512DQ-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-NEXT:    vpermi2q %zmm1, %zmm0, %zmm2
 ; AVX512DQ-NEXT:    vpaddb (%rdx), %ymm2, %ymm0
 ; AVX512DQ-NEXT:    vmovaps 32(%rdx), %ymm1

diff --git a/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast_from_memory.ll b/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast_from_memory.ll
index 134908f4c7392c7..b88e2921484d9b1 100644
--- a/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast_from_memory.ll
+++ b/llvm/test/CodeGen/X86/zero_extend_vector_inreg_of_broadcast_from_memory.ll
@@ -4217,8 +4217,8 @@ define void @vec384_i32_widen_to_i192_factor6_broadcast_to_v2i192_factor2(ptr %i
 ;
 ; AVX512DQ-LABEL: vec384_i32_widen_to_i192_factor6_broadcast_to_v2i192_factor2:
 ; AVX512DQ:       # %bb.0:
-; AVX512DQ-NEXT:    vbroadcasti32x8 {{.*#+}} zmm0 = [16,29,30,31,4,5,16,7,16,29,30,31,4,5,16,7]
-; AVX512DQ-NEXT:    # zmm0 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
+; AVX512DQ-NEXT:    vbroadcasti64x4 {{.*#+}} zmm0 = [16,29,30,31,4,5,16,7,16,29,30,31,4,5,16,7]
+; AVX512DQ-NEXT:    # zmm0 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-NEXT:    vpxor %xmm1, %xmm1, %xmm1
 ; AVX512DQ-NEXT:    vpermt2d (%rdi), %zmm0, %zmm1
 ; AVX512DQ-NEXT:    vpaddb (%rsi), %ymm1, %ymm0
@@ -4439,8 +4439,8 @@ define void @vec384_i64_widen_to_i192_factor3_broadcast_to_v2i192_factor2(ptr %i
 ; AVX512DQ:       # %bb.0:
 ; AVX512DQ-NEXT:    vmovdqa64 (%rdi), %zmm0
 ; AVX512DQ-NEXT:    vpxor %xmm1, %xmm1, %xmm1
-; AVX512DQ-NEXT:    vbroadcasti32x8 {{.*#+}} zmm2 = [0,7,10,0,0,7,10,0]
-; AVX512DQ-NEXT:    # zmm2 = mem[0,1,2,3,4,5,6,7,0,1,2,3,4,5,6,7]
+; AVX512DQ-NEXT:    vbroadcasti64x4 {{.*#+}} zmm2 = [0,7,10,0,0,7,10,0]
+; AVX512DQ-NEXT:    # zmm2 = mem[0,1,2,3,0,1,2,3]
 ; AVX512DQ-NEXT:    vpermi2q %zmm1, %zmm0, %zmm2
 ; AVX512DQ-NEXT:    vpaddb (%rsi), %ymm2, %ymm0
 ; AVX512DQ-NEXT:    vmovaps 32(%rsi), %ymm1
