[llvm] 45abcd1 - [AVX] Cleanup check prefixes

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Thu Feb 27 04:27:45 PST 2020


Author: Simon Pilgrim
Date: 2020-02-27T12:27:33Z
New Revision: 45abcd1df1ae6c42f7b65afe4344d2c435141ccc

URL: https://github.com/llvm/llvm-project/commit/45abcd1df1ae6c42f7b65afe4344d2c435141ccc
DIFF: https://github.com/llvm/llvm-project/commit/45abcd1df1ae6c42f7b65afe4344d2c435141ccc.diff

LOG: [AVX] Cleanup check prefixes

We were missing some coverage for avx512bw targets, as the update script can be pretty dumb about check prefix ordering...
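
For context: with the old RUN lines, the avx512bw run owned no prefix of its own, since NOVBMI was shared with the avx512f run and AVX512BW with the avx512vbmi run. When all three runs produce different code for a function (as for var_shuffle_v64i8 below), update_llc_test_checks.py cannot attribute the avx512bw output to any usable prefix and can end up emitting no checks for that run at all. The fix gives each RUN line a unique trailing prefix; old and new schemes side by side:

    ; Old scheme: the avx512bw run (middle) has no unique prefix.
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,NOBW,NOVBMI,AVX512F
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX512,NOVBMI,AVX512BW
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512BW,VBMI
    ;
    ; New scheme: every run keeps a unique prefix, so each always gets checks.
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,AVX512F
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX512,AVX512BW
    ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512VBMI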

Added: 
    

Modified: 
    llvm/test/CodeGen/X86/var-permute-512.ll

Removed: 
    


################################################################################
diff  --git a/llvm/test/CodeGen/X86/var-permute-512.ll b/llvm/test/CodeGen/X86/var-permute-512.ll
index a39a1ee15985..ad1eddc15ac9 100644
--- a/llvm/test/CodeGen/X86/var-permute-512.ll
+++ b/llvm/test/CodeGen/X86/var-permute-512.ll
@@ -1,7 +1,7 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,NOBW,NOVBMI,AVX512F
-; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX512,NOVBMI,AVX512BW
-; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512BW,VBMI
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512f | FileCheck %s --check-prefixes=AVX512,AVX512F
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw | FileCheck %s --check-prefixes=AVX512,AVX512BW
+; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx512bw,+avx512vbmi | FileCheck %s --check-prefixes=AVX512,AVX512VBMI
 
 define <8 x i64> @var_shuffle_v8i64(<8 x i64> %v, <8 x i64> %indices) nounwind {
 ; AVX512-LABEL: var_shuffle_v8i64:
@@ -92,190 +92,195 @@ define <16 x i32> @var_shuffle_v16i32(<16 x i32> %v, <16 x i32> %indices) nounwi
 }
 
 define <32 x i16> @var_shuffle_v32i16(<32 x i16> %v, <32 x i16> %indices) nounwind {
-; NOBW-LABEL: var_shuffle_v32i16:
-; NOBW:       # %bb.0:
-; NOBW-NEXT:    pushq %rbp
-; NOBW-NEXT:    movq %rsp, %rbp
-; NOBW-NEXT:    andq $-64, %rsp
-; NOBW-NEXT:    subq $2112, %rsp # imm = 0x840
-; NOBW-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
-; NOBW-NEXT:    vextracti128 $1, %ymm1, %xmm3
-; NOBW-NEXT:    vextracti128 $1, %ymm2, %xmm4
-; NOBW-NEXT:    vmovd %xmm4, %eax
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, (%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    movzwl 1536(%rsp,%rax,2), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm0
-; NOBW-NEXT:    vpextrw $1, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $1, 1600(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $2, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $2, 1664(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $3, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $3, 1728(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $4, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $4, 1792(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $5, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $5, 1856(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $6, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $6, 1920(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrw $7, %xmm4, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $7, 1984(%rsp,%rax,2), %xmm0, %xmm0
-; NOBW-NEXT:    vmovd %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    movzwl 1024(%rsp,%rax,2), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrw $1, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $1, 1088(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $2, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $2, 1152(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $3, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $3, 1216(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $4, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $4, 1280(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $5, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $5, 1344(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $6, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $6, 1408(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $7, %xmm2, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $7, 1472(%rsp,%rax,2), %xmm4, %xmm2
-; NOBW-NEXT:    vmovd %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    movzwl 512(%rsp,%rax,2), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrw $1, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $1, 576(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $2, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $2, 640(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $3, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $3, 704(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $4, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $4, 768(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $5, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $5, 832(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $6, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $6, 896(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $7, %xmm3, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $7, 960(%rsp,%rax,2), %xmm4, %xmm3
-; NOBW-NEXT:    vmovd %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    movzwl (%rsp,%rax,2), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrw $1, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $1, 64(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $2, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $2, 128(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $3, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $3, 192(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $4, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $4, 256(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $5, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $5, 320(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $6, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $6, 384(%rsp,%rax,2), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrw $7, %xmm1, %eax
-; NOBW-NEXT:    andl $31, %eax
-; NOBW-NEXT:    vpinsrw $7, 448(%rsp,%rax,2), %xmm4, %xmm1
-; NOBW-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; NOBW-NEXT:    vinserti128 $1, %xmm3, %ymm1, %ymm1
-; NOBW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; NOBW-NEXT:    movq %rbp, %rsp
-; NOBW-NEXT:    popq %rbp
-; NOBW-NEXT:    retq
+; AVX512F-LABEL: var_shuffle_v32i16:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    pushq %rbp
+; AVX512F-NEXT:    movq %rsp, %rbp
+; AVX512F-NEXT:    andq $-64, %rsp
+; AVX512F-NEXT:    subq $2112, %rsp # imm = 0x840
+; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
+; AVX512F-NEXT:    vextracti128 $1, %ymm1, %xmm3
+; AVX512F-NEXT:    vextracti128 $1, %ymm2, %xmm4
+; AVX512F-NEXT:    vmovd %xmm4, %eax
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, (%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    movzwl 1536(%rsp,%rax,2), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm0
+; AVX512F-NEXT:    vpextrw $1, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $1, 1600(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $2, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $2, 1664(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $3, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $3, 1728(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $4, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $4, 1792(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $5, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $5, 1856(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $6, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $6, 1920(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrw $7, %xmm4, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $7, 1984(%rsp,%rax,2), %xmm0, %xmm0
+; AVX512F-NEXT:    vmovd %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    movzwl 1024(%rsp,%rax,2), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrw $1, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $1, 1088(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $2, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $2, 1152(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $3, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $3, 1216(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $4, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $4, 1280(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $5, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $5, 1344(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $6, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $6, 1408(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $7, %xmm2, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $7, 1472(%rsp,%rax,2), %xmm4, %xmm2
+; AVX512F-NEXT:    vmovd %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    movzwl 512(%rsp,%rax,2), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrw $1, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $1, 576(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $2, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $2, 640(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $3, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $3, 704(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $4, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $4, 768(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $5, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $5, 832(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $6, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $6, 896(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $7, %xmm3, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $7, 960(%rsp,%rax,2), %xmm4, %xmm3
+; AVX512F-NEXT:    vmovd %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    movzwl (%rsp,%rax,2), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrw $1, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $1, 64(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $2, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $2, 128(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $3, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $3, 192(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $4, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $4, 256(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $5, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $5, 320(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $6, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $6, 384(%rsp,%rax,2), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrw $7, %xmm1, %eax
+; AVX512F-NEXT:    andl $31, %eax
+; AVX512F-NEXT:    vpinsrw $7, 448(%rsp,%rax,2), %xmm4, %xmm1
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512F-NEXT:    vinserti128 $1, %xmm3, %ymm1, %ymm1
+; AVX512F-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-NEXT:    movq %rbp, %rsp
+; AVX512F-NEXT:    popq %rbp
+; AVX512F-NEXT:    retq
 ;
 ; AVX512BW-LABEL: var_shuffle_v32i16:
 ; AVX512BW:       # %bb.0:
 ; AVX512BW-NEXT:    vpermw %zmm0, %zmm1, %zmm0
 ; AVX512BW-NEXT:    retq
+;
+; AVX512VBMI-LABEL: var_shuffle_v32i16:
+; AVX512VBMI:       # %bb.0:
+; AVX512VBMI-NEXT:    vpermw %zmm0, %zmm1, %zmm0
+; AVX512VBMI-NEXT:    retq
   %index0 = extractelement <32 x i16> %indices, i32 0
   %index1 = extractelement <32 x i16> %indices, i32 1
   %index2 = extractelement <32 x i16> %indices, i32 2
@@ -376,350 +381,575 @@ define <32 x i16> @var_shuffle_v32i16(<32 x i16> %v, <32 x i16> %indices) nounwi
 }
 
 define <64 x i8> @var_shuffle_v64i8(<64 x i8> %v, <64 x i8> %indices) nounwind {
-; NOBW-LABEL: var_shuffle_v64i8:
-; NOBW:       # %bb.0:
-; NOBW-NEXT:    pushq %rbp
-; NOBW-NEXT:    movq %rsp, %rbp
-; NOBW-NEXT:    andq $-64, %rsp
-; NOBW-NEXT:    subq $4160, %rsp # imm = 0x1040
-; NOBW-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
-; NOBW-NEXT:    vextracti128 $1, %ymm1, %xmm3
-; NOBW-NEXT:    vextracti128 $1, %ymm2, %xmm4
-; NOBW-NEXT:    vpextrb $0, %xmm4, %eax
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, (%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
-; NOBW-NEXT:    movzbl 3072(%rsp,%rax), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm0
-; NOBW-NEXT:    vpextrb $1, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $1, 3136(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $2, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $2, 3200(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $3, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $3, 3264(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $4, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $4, 3328(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $5, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $5, 3392(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $6, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $6, 3456(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $7, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $7, 3520(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $8, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $8, 3584(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $9, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $9, 3648(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $10, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $10, 3712(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $11, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $11, 3776(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $12, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $12, 3840(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $13, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $13, 3904(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $14, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $14, 3968(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $15, %xmm4, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $15, 4032(%rsp,%rax), %xmm0, %xmm0
-; NOBW-NEXT:    vpextrb $0, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    movzbl 2048(%rsp,%rax), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrb $1, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $1, 2112(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $2, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $2, 2176(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $3, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $3, 2240(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $4, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $4, 2304(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $5, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $5, 2368(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $6, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $6, 2432(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $7, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $7, 2496(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $8, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $8, 2560(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $9, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $9, 2624(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $10, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $10, 2688(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $11, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $11, 2752(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $12, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $12, 2816(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $13, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $13, 2880(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $14, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $14, 2944(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $15, %xmm2, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $15, 3008(%rsp,%rax), %xmm4, %xmm2
-; NOBW-NEXT:    vpextrb $0, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    movzbl 1024(%rsp,%rax), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrb $1, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $1, 1088(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $2, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $2, 1152(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $3, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $3, 1216(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $4, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $4, 1280(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $5, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $5, 1344(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $6, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $6, 1408(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $7, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $7, 1472(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $8, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $8, 1536(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $9, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $9, 1600(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $10, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $10, 1664(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $11, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $11, 1728(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $12, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $12, 1792(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $13, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $13, 1856(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $14, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $14, 1920(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $15, %xmm3, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $15, 1984(%rsp,%rax), %xmm4, %xmm3
-; NOBW-NEXT:    vpextrb $0, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    movzbl (%rsp,%rax), %eax
-; NOBW-NEXT:    vmovd %eax, %xmm4
-; NOBW-NEXT:    vpextrb $1, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $1, 64(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $2, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $2, 128(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $3, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $3, 192(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $4, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $4, 256(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $5, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $5, 320(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $6, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $6, 384(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $7, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $7, 448(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $8, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $8, 512(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $9, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $9, 576(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $10, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $10, 640(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $11, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $11, 704(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $12, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $12, 768(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $13, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $13, 832(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $14, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $14, 896(%rsp,%rax), %xmm4, %xmm4
-; NOBW-NEXT:    vpextrb $15, %xmm1, %eax
-; NOBW-NEXT:    andl $63, %eax
-; NOBW-NEXT:    vpinsrb $15, 960(%rsp,%rax), %xmm4, %xmm1
-; NOBW-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
-; NOBW-NEXT:    vinserti128 $1, %xmm3, %ymm1, %ymm1
-; NOBW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
-; NOBW-NEXT:    movq %rbp, %rsp
-; NOBW-NEXT:    popq %rbp
-; NOBW-NEXT:    retq
+; AVX512F-LABEL: var_shuffle_v64i8:
+; AVX512F:       # %bb.0:
+; AVX512F-NEXT:    pushq %rbp
+; AVX512F-NEXT:    movq %rsp, %rbp
+; AVX512F-NEXT:    andq $-64, %rsp
+; AVX512F-NEXT:    subq $4160, %rsp # imm = 0x1040
+; AVX512F-NEXT:    vextracti64x4 $1, %zmm1, %ymm2
+; AVX512F-NEXT:    vextracti128 $1, %ymm1, %xmm3
+; AVX512F-NEXT:    vextracti128 $1, %ymm2, %xmm4
+; AVX512F-NEXT:    vpextrb $0, %xmm4, %eax
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, (%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vextractf64x4 $1, %zmm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    vmovaps %ymm0, {{[0-9]+}}(%rsp)
+; AVX512F-NEXT:    movzbl 3072(%rsp,%rax), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm0
+; AVX512F-NEXT:    vpextrb $1, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $1, 3136(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $2, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $2, 3200(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $3, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $3, 3264(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $4, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $4, 3328(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $5, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $5, 3392(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $6, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $6, 3456(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $7, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $7, 3520(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $8, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $8, 3584(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $9, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $9, 3648(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $10, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $10, 3712(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $11, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $11, 3776(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $12, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $12, 3840(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $13, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $13, 3904(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $14, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $14, 3968(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $15, %xmm4, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $15, 4032(%rsp,%rax), %xmm0, %xmm0
+; AVX512F-NEXT:    vpextrb $0, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    movzbl 2048(%rsp,%rax), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrb $1, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $1, 2112(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $2, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $2, 2176(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $3, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $3, 2240(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $4, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $4, 2304(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $5, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $5, 2368(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $6, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $6, 2432(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $7, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $7, 2496(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $8, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $8, 2560(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $9, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $9, 2624(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $10, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $10, 2688(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $11, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $11, 2752(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $12, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $12, 2816(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $13, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $13, 2880(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $14, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $14, 2944(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $15, %xmm2, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $15, 3008(%rsp,%rax), %xmm4, %xmm2
+; AVX512F-NEXT:    vpextrb $0, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    movzbl 1024(%rsp,%rax), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrb $1, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $1, 1088(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $2, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $2, 1152(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $3, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $3, 1216(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $4, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $4, 1280(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $5, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $5, 1344(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $6, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $6, 1408(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $7, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $7, 1472(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $8, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $8, 1536(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $9, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $9, 1600(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $10, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $10, 1664(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $11, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $11, 1728(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $12, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $12, 1792(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $13, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $13, 1856(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $14, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $14, 1920(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $15, %xmm3, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $15, 1984(%rsp,%rax), %xmm4, %xmm3
+; AVX512F-NEXT:    vpextrb $0, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512F-NEXT:    vmovd %eax, %xmm4
+; AVX512F-NEXT:    vpextrb $1, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $1, 64(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $2, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $2, 128(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $3, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $3, 192(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $4, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $4, 256(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $5, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $5, 320(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $6, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $6, 384(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $7, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $7, 448(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $8, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $8, 512(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $9, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $9, 576(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $10, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $10, 640(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $11, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $11, 704(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $12, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $12, 768(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $13, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $13, 832(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $14, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $14, 896(%rsp,%rax), %xmm4, %xmm4
+; AVX512F-NEXT:    vpextrb $15, %xmm1, %eax
+; AVX512F-NEXT:    andl $63, %eax
+; AVX512F-NEXT:    vpinsrb $15, 960(%rsp,%rax), %xmm4, %xmm1
+; AVX512F-NEXT:    vinserti128 $1, %xmm0, %ymm2, %ymm0
+; AVX512F-NEXT:    vinserti128 $1, %xmm3, %ymm1, %ymm1
+; AVX512F-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512F-NEXT:    movq %rbp, %rsp
+; AVX512F-NEXT:    popq %rbp
+; AVX512F-NEXT:    retq
+;
+; AVX512BW-LABEL: var_shuffle_v64i8:
+; AVX512BW:       # %bb.0:
+; AVX512BW-NEXT:    pushq %rbp
+; AVX512BW-NEXT:    movq %rsp, %rbp
+; AVX512BW-NEXT:    andq $-64, %rsp
+; AVX512BW-NEXT:    subq $128, %rsp
+; AVX512BW-NEXT:    vextracti128 $1, %ymm1, %xmm2
+; AVX512BW-NEXT:    vextracti32x4 $2, %zmm1, %xmm3
+; AVX512BW-NEXT:    vextracti32x4 $3, %zmm1, %xmm4
+; AVX512BW-NEXT:    vpextrb $0, %xmm4, %eax
+; AVX512BW-NEXT:    vmovaps %zmm0, (%rsp)
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vmovd %eax, %xmm0
+; AVX512BW-NEXT:    vpextrb $1, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $1, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $2, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $2, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $3, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $3, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $4, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $4, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $5, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $5, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $6, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $6, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $7, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $7, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $8, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $8, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $9, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $9, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $10, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $10, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $11, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $11, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $12, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $12, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $13, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $13, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $14, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $14, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $15, %xmm4, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $15, (%rsp,%rax), %xmm0, %xmm0
+; AVX512BW-NEXT:    vpextrb $0, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vmovd %eax, %xmm4
+; AVX512BW-NEXT:    vpextrb $1, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $1, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $2, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $2, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $3, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $3, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $4, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $4, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $5, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $5, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $6, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $6, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $7, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $7, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $8, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $8, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $9, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $9, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $10, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $10, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $11, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $11, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $12, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $12, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $13, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $13, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $14, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $14, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $15, %xmm3, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $15, %eax, %xmm4, %xmm3
+; AVX512BW-NEXT:    vpextrb $0, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vmovd %eax, %xmm4
+; AVX512BW-NEXT:    vpextrb $1, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $1, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $2, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $2, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $3, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $3, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $4, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $4, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $5, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $5, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $6, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $6, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $7, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $7, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $8, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $8, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $9, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $9, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $10, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $10, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $11, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $11, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $12, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $12, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $13, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $13, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $14, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $14, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $15, %xmm2, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $15, %eax, %xmm4, %xmm2
+; AVX512BW-NEXT:    vpextrb $0, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vmovd %eax, %xmm4
+; AVX512BW-NEXT:    vpextrb $1, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $1, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $2, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $2, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $3, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $3, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $4, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $4, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $5, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $5, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $6, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $6, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $7, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $7, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $8, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $8, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $9, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $9, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $10, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    vpinsrb $10, (%rsp,%rax), %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $11, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $11, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $12, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $12, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $13, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $13, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $14, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $14, %eax, %xmm4, %xmm4
+; AVX512BW-NEXT:    vpextrb $15, %xmm1, %eax
+; AVX512BW-NEXT:    andl $63, %eax
+; AVX512BW-NEXT:    movzbl (%rsp,%rax), %eax
+; AVX512BW-NEXT:    vpinsrb $15, %eax, %xmm4, %xmm1
+; AVX512BW-NEXT:    vinserti128 $1, %xmm0, %ymm3, %ymm0
+; AVX512BW-NEXT:    vinserti128 $1, %xmm2, %ymm1, %ymm1
+; AVX512BW-NEXT:    vinserti64x4 $1, %ymm0, %zmm1, %zmm0
+; AVX512BW-NEXT:    movq %rbp, %rsp
+; AVX512BW-NEXT:    popq %rbp
+; AVX512BW-NEXT:    retq
 ;
-; VBMI-LABEL: var_shuffle_v64i8:
-; VBMI:       # %bb.0:
-; VBMI-NEXT:    vpermb %zmm0, %zmm1, %zmm0
-; VBMI-NEXT:    retq
+; AVX512VBMI-LABEL: var_shuffle_v64i8:
+; AVX512VBMI:       # %bb.0:
+; AVX512VBMI-NEXT:    vpermb %zmm0, %zmm1, %zmm0
+; AVX512VBMI-NEXT:    retq
   %index0 = extractelement <64 x i8> %indices, i32 0
   %index1 = extractelement <64 x i8> %indices, i32 1
   %index2 = extractelement <64 x i8> %indices, i32 2
