[llvm] r323212 - [X86] Rewrite vXi1 element insertion by using a vXi1 scalar_to_vector and inserting into a vXi1 vector.
Craig Topper via llvm-commits
llvm-commits at lists.llvm.org
Tue Jan 23 07:56:37 PST 2018
Author: ctopper
Date: Tue Jan 23 07:56:36 2018
New Revision: 323212
URL: http://llvm.org/viewvc/llvm-project?rev=323212&view=rev
Log:
[X86] Rewrite vXi1 element insertion by using a vXi1 scalar_to_vector and inserting into a vXi1 vector.
The existing code was already doing something very similar to subvector insertion, so this allows us to remove the nearly duplicate code.
This patch is a little larger than it should be due to differences in the DQI handling between the two today.
Modified:
llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
llvm/trunk/test/CodeGen/X86/avx512-cvt.ll
llvm/trunk/test/CodeGen/X86/avx512-insert-extract.ll
llvm/trunk/test/CodeGen/X86/avx512-mask-op.ll
llvm/trunk/test/CodeGen/X86/avx512-schedule.ll
llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll
llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll
Modified: llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86ISelLowering.cpp?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86ISelLowering.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86ISelLowering.cpp Tue Jan 23 07:56:36 2018
@@ -1198,6 +1198,7 @@ X86TargetLowering::X86TargetLowering(con
setOperationAction(ISD::CONCAT_VECTORS, MVT::v16i1, Custom);
setOperationAction(ISD::CONCAT_VECTORS, MVT::v8i1, Custom);
setOperationAction(ISD::CONCAT_VECTORS, MVT::v4i1, Custom);
+ setOperationAction(ISD::INSERT_SUBVECTOR, MVT::v2i1, Custom);
setOperationAction(ISD::INSERT_SUBVECTOR, MVT::v4i1, Custom);
setOperationAction(ISD::INSERT_SUBVECTOR, MVT::v8i1, Custom);
setOperationAction(ISD::INSERT_SUBVECTOR, MVT::v16i1, Custom);
@@ -14924,74 +14925,11 @@ static SDValue InsertBitToMaskVector(SDV
return DAG.getNode(ISD::TRUNCATE, dl, VecVT, ExtOp);
}
- unsigned IdxVal = cast<ConstantSDNode>(Idx)->getZExtValue();
- unsigned NumElems = VecVT.getVectorNumElements();
-
- // If the kshift instructions of the correct width aren't natively supported
- // then we need to promote the vector to the native size to get the correct
- // zeroing behavior.
- if ((!Subtarget.hasDQI() && NumElems == 8) || (NumElems < 8)) {
- // Need to promote to v16i1, do the insert, then extract back.
- Vec = DAG.getNode(ISD::INSERT_SUBVECTOR, dl, MVT::v16i1,
- DAG.getUNDEF(MVT::v16i1), Vec,
- DAG.getIntPtrConstant(0, dl));
- Op = DAG.getNode(ISD::INSERT_VECTOR_ELT, dl, MVT::v16i1, Vec, Elt, Idx);
- return DAG.getNode(ISD::EXTRACT_SUBVECTOR, dl, VecVT, Op,
- DAG.getIntPtrConstant(0, dl));
- }
+ // Copy into a k-register, extract to v1i1 and insert_subvector.
+ SDValue EltInVec = DAG.getNode(ISD::SCALAR_TO_VECTOR, dl, MVT::v1i1, Elt);
- SDValue EltInVec = DAG.getNode(ISD::SCALAR_TO_VECTOR, dl, VecVT, Elt);
-
- if (Vec.isUndef()) {
- if (IdxVal)
- EltInVec = DAG.getNode(X86ISD::KSHIFTL, dl, VecVT, EltInVec,
- DAG.getConstant(IdxVal, dl, MVT::i8));
- return EltInVec;
- }
-
- // Insertion of one bit into first position
- if (IdxVal == 0) {
- // Clean top bits of vector.
- EltInVec = DAG.getNode(ISD::SCALAR_TO_VECTOR, dl, MVT::v1i1, Elt);
- EltInVec = DAG.getNode(ISD::INSERT_SUBVECTOR, dl, VecVT,
- getZeroVector(VecVT, Subtarget, DAG, dl),
- EltInVec, DAG.getIntPtrConstant(0, dl));
- // Clean the first bit in source vector.
- Vec = DAG.getNode(X86ISD::KSHIFTR, dl, VecVT, Vec,
- DAG.getConstant(1 , dl, MVT::i8));
- Vec = DAG.getNode(X86ISD::KSHIFTL, dl, VecVT, Vec,
- DAG.getConstant(1, dl, MVT::i8));
-
- return DAG.getNode(ISD::OR, dl, VecVT, Vec, EltInVec);
- }
- // Insertion of one bit into last position
- if (IdxVal == NumElems - 1) {
- // Move the bit to the last position inside the vector.
- EltInVec = DAG.getNode(X86ISD::KSHIFTL, dl, VecVT, EltInVec,
- DAG.getConstant(IdxVal, dl, MVT::i8));
- // Clean the last bit in the source vector.
- Vec = DAG.getNode(X86ISD::KSHIFTL, dl, VecVT, Vec,
- DAG.getConstant(1, dl, MVT::i8));
- Vec = DAG.getNode(X86ISD::KSHIFTR, dl, VecVT, Vec,
- DAG.getConstant(1 , dl, MVT::i8));
-
- return DAG.getNode(ISD::OR, dl, VecVT, Vec, EltInVec);
- }
-
- // Move the current value of the bit to be replace to bit 0.
- SDValue Merged = DAG.getNode(X86ISD::KSHIFTR, dl, VecVT, Vec,
- DAG.getConstant(IdxVal, dl, MVT::i8));
- // Xor with the new bit.
- Merged = DAG.getNode(ISD::XOR, dl, VecVT, Merged, EltInVec);
- // Shift to MSB, filling bottom bits with 0.
- Merged = DAG.getNode(X86ISD::KSHIFTL, dl, VecVT, Merged,
- DAG.getConstant(NumElems - 1, dl, MVT::i8));
- // Shift to the final position, filling upper bits with 0.
- Merged = DAG.getNode(X86ISD::KSHIFTR, dl, VecVT, Merged,
- DAG.getConstant(NumElems - 1 - IdxVal, dl, MVT::i8));
- // Xor with original vector to cancel out the original bit value that's still
- // present.
- return DAG.getNode(ISD::XOR, dl, VecVT, Merged, Vec);
+ return DAG.getNode(ISD::INSERT_SUBVECTOR, dl, VecVT, Vec, EltInVec,
+ Op.getOperand(2));
}
SDValue X86TargetLowering::LowerINSERT_VECTOR_ELT(SDValue Op,
Modified: llvm/trunk/test/CodeGen/X86/avx512-cvt.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-cvt.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-cvt.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-cvt.ll Tue Jan 23 07:56:36 2018
@@ -2161,20 +2161,14 @@ define <2 x i64> @test_2f64toub(<2 x dou
; KNL-LABEL: test_2f64toub:
; KNL: # %bb.0:
; KNL-NEXT: # kill: def %xmm1 killed %xmm1 def %zmm1
-; KNL-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
-; KNL-NEXT: vcvttsd2si %xmm2, %eax
-; KNL-NEXT: kmovw %eax, %k0
; KNL-NEXT: vcvttsd2si %xmm0, %eax
; KNL-NEXT: andl $1, %eax
+; KNL-NEXT: kmovw %eax, %k0
+; KNL-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; KNL-NEXT: vcvttsd2si %xmm0, %eax
; KNL-NEXT: kmovw %eax, %k1
-; KNL-NEXT: kshiftrw $1, %k0, %k2
-; KNL-NEXT: kshiftlw $1, %k2, %k2
-; KNL-NEXT: korw %k1, %k2, %k1
-; KNL-NEXT: kshiftrw $1, %k1, %k2
-; KNL-NEXT: kxorw %k0, %k2, %k0
-; KNL-NEXT: kshiftlw $15, %k0, %k0
-; KNL-NEXT: kshiftrw $14, %k0, %k0
-; KNL-NEXT: kxorw %k1, %k0, %k1
+; KNL-NEXT: kshiftlw $1, %k1, %k1
+; KNL-NEXT: korw %k1, %k0, %k1
; KNL-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; KNL-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; KNL-NEXT: vzeroupper
@@ -2194,17 +2188,12 @@ define <2 x i64> @test_2f64toub(<2 x dou
; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
; AVX512DQ-NEXT: vcvttsd2si %xmm2, %eax
; AVX512DQ-NEXT: kmovw %eax, %k0
+; AVX512DQ-NEXT: kshiftlb $1, %k0, %k0
; AVX512DQ-NEXT: vcvttsd2si %xmm0, %eax
-; AVX512DQ-NEXT: andl $1, %eax
; AVX512DQ-NEXT: kmovw %eax, %k1
-; AVX512DQ-NEXT: kshiftrw $1, %k0, %k2
-; AVX512DQ-NEXT: kshiftlw $1, %k2, %k2
-; AVX512DQ-NEXT: korw %k1, %k2, %k1
-; AVX512DQ-NEXT: kshiftrw $1, %k1, %k2
-; AVX512DQ-NEXT: kxorw %k0, %k2, %k0
-; AVX512DQ-NEXT: kshiftlw $15, %k0, %k0
-; AVX512DQ-NEXT: kshiftrw $14, %k0, %k0
-; AVX512DQ-NEXT: kxorw %k1, %k0, %k1
+; AVX512DQ-NEXT: kshiftlb $7, %k1, %k1
+; AVX512DQ-NEXT: kshiftrb $7, %k1, %k1
+; AVX512DQ-NEXT: korb %k0, %k1, %k1
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; AVX512DQ-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; AVX512DQ-NEXT: vzeroupper
@@ -2213,20 +2202,14 @@ define <2 x i64> @test_2f64toub(<2 x dou
; AVX512BW-LABEL: test_2f64toub:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: # kill: def %xmm1 killed %xmm1 def %zmm1
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
-; AVX512BW-NEXT: vcvttsd2si %xmm2, %eax
-; AVX512BW-NEXT: kmovd %eax, %k0
; AVX512BW-NEXT: vcvttsd2si %xmm0, %eax
; AVX512BW-NEXT: andl $1, %eax
-; AVX512BW-NEXT: kmovw %eax, %k1
-; AVX512BW-NEXT: kshiftrw $1, %k0, %k2
-; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
-; AVX512BW-NEXT: korw %k1, %k2, %k1
-; AVX512BW-NEXT: kshiftrw $1, %k1, %k2
-; AVX512BW-NEXT: kxorw %k0, %k2, %k0
-; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
-; AVX512BW-NEXT: kshiftrw $14, %k0, %k0
-; AVX512BW-NEXT: kxorw %k1, %k0, %k1
+; AVX512BW-NEXT: kmovw %eax, %k0
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512BW-NEXT: vcvttsd2si %xmm0, %eax
+; AVX512BW-NEXT: kmovd %eax, %k1
+; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
+; AVX512BW-NEXT: korw %k1, %k0, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; AVX512BW-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; AVX512BW-NEXT: vzeroupper
@@ -2365,20 +2348,14 @@ define <2 x i64> @test_2f64tosb(<2 x dou
; KNL-LABEL: test_2f64tosb:
; KNL: # %bb.0:
; KNL-NEXT: # kill: def %xmm1 killed %xmm1 def %zmm1
-; KNL-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
-; KNL-NEXT: vcvttsd2si %xmm2, %eax
-; KNL-NEXT: kmovw %eax, %k0
; KNL-NEXT: vcvttsd2si %xmm0, %eax
; KNL-NEXT: andl $1, %eax
+; KNL-NEXT: kmovw %eax, %k0
+; KNL-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; KNL-NEXT: vcvttsd2si %xmm0, %eax
; KNL-NEXT: kmovw %eax, %k1
-; KNL-NEXT: kshiftrw $1, %k0, %k2
-; KNL-NEXT: kshiftlw $1, %k2, %k2
-; KNL-NEXT: korw %k1, %k2, %k1
-; KNL-NEXT: kshiftrw $1, %k1, %k2
-; KNL-NEXT: kxorw %k0, %k2, %k0
-; KNL-NEXT: kshiftlw $15, %k0, %k0
-; KNL-NEXT: kshiftrw $14, %k0, %k0
-; KNL-NEXT: kxorw %k1, %k0, %k1
+; KNL-NEXT: kshiftlw $1, %k1, %k1
+; KNL-NEXT: korw %k1, %k0, %k1
; KNL-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; KNL-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; KNL-NEXT: vzeroupper
@@ -2398,17 +2375,12 @@ define <2 x i64> @test_2f64tosb(<2 x dou
; AVX512DQ-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
; AVX512DQ-NEXT: vcvttsd2si %xmm2, %eax
; AVX512DQ-NEXT: kmovw %eax, %k0
+; AVX512DQ-NEXT: kshiftlb $1, %k0, %k0
; AVX512DQ-NEXT: vcvttsd2si %xmm0, %eax
-; AVX512DQ-NEXT: andl $1, %eax
; AVX512DQ-NEXT: kmovw %eax, %k1
-; AVX512DQ-NEXT: kshiftrw $1, %k0, %k2
-; AVX512DQ-NEXT: kshiftlw $1, %k2, %k2
-; AVX512DQ-NEXT: korw %k1, %k2, %k1
-; AVX512DQ-NEXT: kshiftrw $1, %k1, %k2
-; AVX512DQ-NEXT: kxorw %k0, %k2, %k0
-; AVX512DQ-NEXT: kshiftlw $15, %k0, %k0
-; AVX512DQ-NEXT: kshiftrw $14, %k0, %k0
-; AVX512DQ-NEXT: kxorw %k1, %k0, %k1
+; AVX512DQ-NEXT: kshiftlb $7, %k1, %k1
+; AVX512DQ-NEXT: kshiftrb $7, %k1, %k1
+; AVX512DQ-NEXT: korb %k0, %k1, %k1
; AVX512DQ-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; AVX512DQ-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; AVX512DQ-NEXT: vzeroupper
@@ -2417,20 +2389,14 @@ define <2 x i64> @test_2f64tosb(<2 x dou
; AVX512BW-LABEL: test_2f64tosb:
; AVX512BW: # %bb.0:
; AVX512BW-NEXT: # kill: def %xmm1 killed %xmm1 def %zmm1
-; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm2 = xmm0[1,0]
-; AVX512BW-NEXT: vcvttsd2si %xmm2, %eax
-; AVX512BW-NEXT: kmovd %eax, %k0
; AVX512BW-NEXT: vcvttsd2si %xmm0, %eax
; AVX512BW-NEXT: andl $1, %eax
-; AVX512BW-NEXT: kmovw %eax, %k1
-; AVX512BW-NEXT: kshiftrw $1, %k0, %k2
-; AVX512BW-NEXT: kshiftlw $1, %k2, %k2
-; AVX512BW-NEXT: korw %k1, %k2, %k1
-; AVX512BW-NEXT: kshiftrw $1, %k1, %k2
-; AVX512BW-NEXT: kxorw %k0, %k2, %k0
-; AVX512BW-NEXT: kshiftlw $15, %k0, %k0
-; AVX512BW-NEXT: kshiftrw $14, %k0, %k0
-; AVX512BW-NEXT: kxorw %k1, %k0, %k1
+; AVX512BW-NEXT: kmovw %eax, %k0
+; AVX512BW-NEXT: vpermilpd {{.*#+}} xmm0 = xmm0[1,0]
+; AVX512BW-NEXT: vcvttsd2si %xmm0, %eax
+; AVX512BW-NEXT: kmovd %eax, %k1
+; AVX512BW-NEXT: kshiftlw $1, %k1, %k1
+; AVX512BW-NEXT: korw %k1, %k0, %k1
; AVX512BW-NEXT: vmovdqa64 %zmm1, %zmm0 {%k1} {z}
; AVX512BW-NEXT: # kill: def %xmm0 killed %xmm0 killed %zmm0
; AVX512BW-NEXT: vzeroupper
Modified: llvm/trunk/test/CodeGen/X86/avx512-insert-extract.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-insert-extract.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-insert-extract.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-insert-extract.ll Tue Jan 23 07:56:36 2018
@@ -307,7 +307,7 @@ define i16 @test16(i1 *%addr, i16 %a) {
; KNL-NEXT: kxorw %k1, %k2, %k1
; KNL-NEXT: kshiftlw $15, %k1, %k1
; KNL-NEXT: kshiftrw $5, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
+; KNL-NEXT: kxorw %k1, %k0, %k0
; KNL-NEXT: kmovw %k0, %eax
; KNL-NEXT: ## kill: def %ax killed %ax killed %eax
; KNL-NEXT: retq
@@ -320,7 +320,7 @@ define i16 @test16(i1 *%addr, i16 %a) {
; SKX-NEXT: kxorw %k0, %k2, %k0
; SKX-NEXT: kshiftlw $15, %k0, %k0
; SKX-NEXT: kshiftrw $5, %k0, %k0
-; SKX-NEXT: kxorw %k1, %k0, %k0
+; SKX-NEXT: kxorw %k0, %k1, %k0
; SKX-NEXT: kmovd %k0, %eax
; SKX-NEXT: ## kill: def %ax killed %ax killed %eax
; SKX-NEXT: retq
@@ -341,7 +341,7 @@ define i8 @test17(i1 *%addr, i8 %a) {
; KNL-NEXT: kxorw %k1, %k2, %k1
; KNL-NEXT: kshiftlw $15, %k1, %k1
; KNL-NEXT: kshiftrw $11, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
+; KNL-NEXT: kxorw %k1, %k0, %k0
; KNL-NEXT: kmovw %k0, %eax
; KNL-NEXT: ## kill: def %al killed %al killed %eax
; KNL-NEXT: retq
@@ -354,7 +354,7 @@ define i8 @test17(i1 *%addr, i8 %a) {
; SKX-NEXT: kxorb %k0, %k2, %k0
; SKX-NEXT: kshiftlb $7, %k0, %k0
; SKX-NEXT: kshiftrb $3, %k0, %k0
-; SKX-NEXT: kxorb %k1, %k0, %k0
+; SKX-NEXT: kxorb %k0, %k1, %k0
; SKX-NEXT: kmovd %k0, %eax
; SKX-NEXT: ## kill: def %al killed %al killed %eax
; SKX-NEXT: retq
@@ -793,7 +793,7 @@ define i32 @test_insertelement_v32i1(i32
; KNL-NEXT: kxorw %k2, %k1, %k1
; KNL-NEXT: kshiftlw $15, %k1, %k1
; KNL-NEXT: kshiftrw $11, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
+; KNL-NEXT: kxorw %k1, %k0, %k0
; KNL-NEXT: kmovw %k0, %eax
; KNL-NEXT: orl %ecx, %eax
; KNL-NEXT: vzeroupper
@@ -811,7 +811,7 @@ define i32 @test_insertelement_v32i1(i32
; SKX-NEXT: kxord %k2, %k1, %k1
; SKX-NEXT: kshiftld $31, %k1, %k1
; SKX-NEXT: kshiftrd $27, %k1, %k1
-; SKX-NEXT: kxord %k0, %k1, %k0
+; SKX-NEXT: kxord %k1, %k0, %k0
; SKX-NEXT: kmovd %k0, %eax
; SKX-NEXT: vzeroupper
; SKX-NEXT: retq
@@ -835,7 +835,7 @@ define i8 @test_iinsertelement_v4i1(i32
; KNL-NEXT: kxorw %k2, %k1, %k1
; KNL-NEXT: kshiftlw $15, %k1, %k1
; KNL-NEXT: kshiftrw $13, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
+; KNL-NEXT: kxorw %k1, %k0, %k0
; KNL-NEXT: kmovw %k0, %eax
; KNL-NEXT: ## kill: def %al killed %al killed %eax
; KNL-NEXT: vzeroupper
@@ -846,12 +846,12 @@ define i8 @test_iinsertelement_v4i1(i32
; SKX-NEXT: cmpl %esi, %edi
; SKX-NEXT: setb %al
; SKX-NEXT: vpcmpltud %xmm1, %xmm0, %k0
-; SKX-NEXT: kshiftrw $2, %k0, %k1
+; SKX-NEXT: kshiftrb $2, %k0, %k1
; SKX-NEXT: kmovd %eax, %k2
-; SKX-NEXT: kxorw %k2, %k1, %k1
-; SKX-NEXT: kshiftlw $15, %k1, %k1
-; SKX-NEXT: kshiftrw $13, %k1, %k1
-; SKX-NEXT: kxorw %k0, %k1, %k0
+; SKX-NEXT: kxorb %k2, %k1, %k1
+; SKX-NEXT: kshiftlb $7, %k1, %k1
+; SKX-NEXT: kshiftrb $5, %k1, %k1
+; SKX-NEXT: kxorb %k1, %k0, %k0
; SKX-NEXT: kmovd %k0, %eax
; SKX-NEXT: ## kill: def %al killed %al killed %eax
; SKX-NEXT: retq
@@ -871,12 +871,11 @@ define i8 @test_iinsertelement_v2i1(i32
; KNL-NEXT: cmpl %esi, %edi
; KNL-NEXT: setb %al
; KNL-NEXT: vpcmpltuq %zmm1, %zmm0, %k0
-; KNL-NEXT: kshiftrw $1, %k0, %k1
-; KNL-NEXT: kmovw %eax, %k2
-; KNL-NEXT: kxorw %k2, %k1, %k1
-; KNL-NEXT: kshiftlw $15, %k1, %k1
-; KNL-NEXT: kshiftrw $14, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
+; KNL-NEXT: kshiftlw $15, %k0, %k0
+; KNL-NEXT: kshiftrw $15, %k0, %k0
+; KNL-NEXT: kmovw %eax, %k1
+; KNL-NEXT: kshiftlw $1, %k1, %k1
+; KNL-NEXT: korw %k1, %k0, %k0
; KNL-NEXT: kmovw %k0, %eax
; KNL-NEXT: ## kill: def %al killed %al killed %eax
; KNL-NEXT: vzeroupper
@@ -887,12 +886,11 @@ define i8 @test_iinsertelement_v2i1(i32
; SKX-NEXT: cmpl %esi, %edi
; SKX-NEXT: setb %al
; SKX-NEXT: vpcmpltuq %xmm1, %xmm0, %k0
-; SKX-NEXT: kshiftrw $1, %k0, %k1
-; SKX-NEXT: kmovd %eax, %k2
-; SKX-NEXT: kxorw %k2, %k1, %k1
-; SKX-NEXT: kshiftlw $15, %k1, %k1
-; SKX-NEXT: kshiftrw $14, %k1, %k1
-; SKX-NEXT: kxorw %k0, %k1, %k0
+; SKX-NEXT: kshiftlb $7, %k0, %k0
+; SKX-NEXT: kshiftrb $7, %k0, %k0
+; SKX-NEXT: kmovd %eax, %k1
+; SKX-NEXT: kshiftlb $1, %k1, %k1
+; SKX-NEXT: korb %k1, %k0, %k0
; SKX-NEXT: kmovd %k0, %eax
; SKX-NEXT: ## kill: def %al killed %al killed %eax
; SKX-NEXT: retq
Modified: llvm/trunk/test/CodeGen/X86/avx512-mask-op.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-mask-op.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-mask-op.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-mask-op.ll Tue Jan 23 07:56:36 2018
@@ -990,7 +990,7 @@ define <64 x i8> @test16(i64 %x) {
; KNL-NEXT: kxorw %k4, %k5, %k4
; KNL-NEXT: kshiftlw $15, %k4, %k4
; KNL-NEXT: kshiftrw $10, %k4, %k4
-; KNL-NEXT: kxorw %k0, %k4, %k4
+; KNL-NEXT: kxorw %k4, %k0, %k4
; KNL-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k3} {z}
; KNL-NEXT: vpmovdb %zmm0, %xmm0
; KNL-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
@@ -1012,7 +1012,7 @@ define <64 x i8> @test16(i64 %x) {
; SKX-NEXT: kxorq %k1, %k2, %k1
; SKX-NEXT: kshiftlq $63, %k1, %k1
; SKX-NEXT: kshiftrq $58, %k1, %k1
-; SKX-NEXT: kxorq %k0, %k1, %k0
+; SKX-NEXT: kxorq %k1, %k0, %k0
; SKX-NEXT: vpmovm2b %k0, %zmm0
; SKX-NEXT: retq
;
@@ -1025,7 +1025,7 @@ define <64 x i8> @test16(i64 %x) {
; AVX512BW-NEXT: kxorq %k1, %k2, %k1
; AVX512BW-NEXT: kshiftlq $63, %k1, %k1
; AVX512BW-NEXT: kshiftrq $58, %k1, %k1
-; AVX512BW-NEXT: kxorq %k0, %k1, %k0
+; AVX512BW-NEXT: kxorq %k1, %k0, %k0
; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
; AVX512BW-NEXT: retq
;
@@ -1046,7 +1046,7 @@ define <64 x i8> @test16(i64 %x) {
; AVX512DQ-NEXT: kxorw %k4, %k5, %k4
; AVX512DQ-NEXT: kshiftlw $15, %k4, %k4
; AVX512DQ-NEXT: kshiftrw $10, %k4, %k4
-; AVX512DQ-NEXT: kxorw %k0, %k4, %k0
+; AVX512DQ-NEXT: kxorw %k4, %k0, %k0
; AVX512DQ-NEXT: vpmovm2d %k3, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vpmovm2d %k2, %zmm1
@@ -1079,12 +1079,12 @@ define <64 x i8> @test17(i64 %x, i32 %y,
; KNL-NEXT: kmovw %edi, %k3
; KNL-NEXT: cmpl %edx, %esi
; KNL-NEXT: setg %al
-; KNL-NEXT: kmovw %eax, %k4
-; KNL-NEXT: kshiftrw $5, %k0, %k5
-; KNL-NEXT: kxorw %k4, %k5, %k4
+; KNL-NEXT: kshiftrw $5, %k0, %k4
+; KNL-NEXT: kmovw %eax, %k5
+; KNL-NEXT: kxorw %k5, %k4, %k4
; KNL-NEXT: kshiftlw $15, %k4, %k4
; KNL-NEXT: kshiftrw $10, %k4, %k4
-; KNL-NEXT: kxorw %k0, %k4, %k4
+; KNL-NEXT: kxorw %k4, %k0, %k4
; KNL-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k3} {z}
; KNL-NEXT: vpmovdb %zmm0, %xmm0
; KNL-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k2} {z}
@@ -1107,7 +1107,7 @@ define <64 x i8> @test17(i64 %x, i32 %y,
; SKX-NEXT: kxorq %k1, %k2, %k1
; SKX-NEXT: kshiftlq $63, %k1, %k1
; SKX-NEXT: kshiftrq $58, %k1, %k1
-; SKX-NEXT: kxorq %k0, %k1, %k0
+; SKX-NEXT: kxorq %k1, %k0, %k0
; SKX-NEXT: vpmovm2b %k0, %zmm0
; SKX-NEXT: retq
;
@@ -1121,7 +1121,7 @@ define <64 x i8> @test17(i64 %x, i32 %y,
; AVX512BW-NEXT: kxorq %k1, %k2, %k1
; AVX512BW-NEXT: kshiftlq $63, %k1, %k1
; AVX512BW-NEXT: kshiftrq $58, %k1, %k1
-; AVX512BW-NEXT: kxorq %k0, %k1, %k0
+; AVX512BW-NEXT: kxorq %k1, %k0, %k0
; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
; AVX512BW-NEXT: retq
;
@@ -1138,12 +1138,12 @@ define <64 x i8> @test17(i64 %x, i32 %y,
; AVX512DQ-NEXT: kmovw %edi, %k3
; AVX512DQ-NEXT: cmpl %edx, %esi
; AVX512DQ-NEXT: setg %al
-; AVX512DQ-NEXT: kmovw %eax, %k4
-; AVX512DQ-NEXT: kshiftrw $5, %k0, %k5
-; AVX512DQ-NEXT: kxorw %k4, %k5, %k4
+; AVX512DQ-NEXT: kshiftrw $5, %k0, %k4
+; AVX512DQ-NEXT: kmovw %eax, %k5
+; AVX512DQ-NEXT: kxorw %k5, %k4, %k4
; AVX512DQ-NEXT: kshiftlw $15, %k4, %k4
; AVX512DQ-NEXT: kshiftrw $10, %k4, %k4
-; AVX512DQ-NEXT: kxorw %k0, %k4, %k0
+; AVX512DQ-NEXT: kxorw %k4, %k0, %k0
; AVX512DQ-NEXT: vpmovm2d %k3, %zmm0
; AVX512DQ-NEXT: vpmovdb %zmm0, %xmm0
; AVX512DQ-NEXT: vpmovm2d %k2, %zmm1
@@ -1165,20 +1165,19 @@ define <64 x i8> @test17(i64 %x, i32 %y,
define <8 x i1> @test18(i8 %a, i16 %y) {
; KNL-LABEL: test18:
; KNL: ## %bb.0:
-; KNL-NEXT: kmovw %edi, %k0
-; KNL-NEXT: kmovw %esi, %k1
-; KNL-NEXT: kshiftrw $8, %k1, %k2
-; KNL-NEXT: kshiftrw $9, %k1, %k1
-; KNL-NEXT: kshiftrw $6, %k0, %k3
-; KNL-NEXT: kxorw %k1, %k3, %k1
-; KNL-NEXT: kshiftlw $15, %k1, %k1
-; KNL-NEXT: kshiftrw $9, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k0
-; KNL-NEXT: kshiftrw $7, %k0, %k1
+; KNL-NEXT: kmovw %edi, %k1
+; KNL-NEXT: kmovw %esi, %k2
+; KNL-NEXT: kshiftrw $8, %k2, %k0
+; KNL-NEXT: kshiftrw $9, %k2, %k2
+; KNL-NEXT: kshiftrw $6, %k1, %k3
+; KNL-NEXT: kxorw %k2, %k3, %k2
+; KNL-NEXT: kshiftlw $15, %k2, %k2
+; KNL-NEXT: kshiftrw $9, %k2, %k2
; KNL-NEXT: kxorw %k2, %k1, %k1
-; KNL-NEXT: kshiftlw $15, %k1, %k1
-; KNL-NEXT: kshiftrw $8, %k1, %k1
-; KNL-NEXT: kxorw %k0, %k1, %k1
+; KNL-NEXT: kshiftlw $9, %k1, %k1
+; KNL-NEXT: kshiftrw $9, %k1, %k1
+; KNL-NEXT: kshiftlw $7, %k0, %k0
+; KNL-NEXT: korw %k0, %k1, %k1
; KNL-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
; KNL-NEXT: vpmovdw %zmm0, %ymm0
; KNL-NEXT: ## kill: def %xmm0 killed %xmm0 killed %ymm0
@@ -1187,38 +1186,37 @@ define <8 x i1> @test18(i8 %a, i16 %y) {
;
; SKX-LABEL: test18:
; SKX: ## %bb.0:
-; SKX-NEXT: kmovd %edi, %k0
-; SKX-NEXT: kmovd %esi, %k1
-; SKX-NEXT: kshiftrw $8, %k1, %k2
-; SKX-NEXT: kshiftrw $9, %k1, %k1
-; SKX-NEXT: kshiftrb $6, %k0, %k3
-; SKX-NEXT: kxorb %k1, %k3, %k1
-; SKX-NEXT: kshiftlb $7, %k1, %k1
+; SKX-NEXT: kmovd %edi, %k1
+; SKX-NEXT: kmovd %esi, %k2
+; SKX-NEXT: kshiftrw $8, %k2, %k0
+; SKX-NEXT: kshiftrw $9, %k2, %k2
+; SKX-NEXT: kshiftrb $6, %k1, %k3
+; SKX-NEXT: kxorb %k2, %k3, %k2
+; SKX-NEXT: kshiftlb $7, %k2, %k2
+; SKX-NEXT: kshiftrb $1, %k2, %k2
+; SKX-NEXT: kxorb %k2, %k1, %k1
+; SKX-NEXT: kshiftlb $1, %k1, %k1
; SKX-NEXT: kshiftrb $1, %k1, %k1
-; SKX-NEXT: kxorb %k0, %k1, %k0
-; SKX-NEXT: kshiftlb $1, %k0, %k0
-; SKX-NEXT: kshiftrb $1, %k0, %k0
-; SKX-NEXT: kshiftlb $7, %k2, %k1
-; SKX-NEXT: korb %k1, %k0, %k0
+; SKX-NEXT: kshiftlb $7, %k0, %k0
+; SKX-NEXT: korb %k0, %k1, %k0
; SKX-NEXT: vpmovm2w %k0, %xmm0
; SKX-NEXT: retq
;
; AVX512BW-LABEL: test18:
; AVX512BW: ## %bb.0:
-; AVX512BW-NEXT: kmovd %edi, %k0
-; AVX512BW-NEXT: kmovd %esi, %k1
-; AVX512BW-NEXT: kshiftrw $8, %k1, %k2
-; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
-; AVX512BW-NEXT: kshiftrw $6, %k0, %k3
-; AVX512BW-NEXT: kxorw %k1, %k3, %k1
-; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
-; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
-; AVX512BW-NEXT: kxorw %k0, %k1, %k0
-; AVX512BW-NEXT: kshiftrw $7, %k0, %k1
+; AVX512BW-NEXT: kmovd %edi, %k1
+; AVX512BW-NEXT: kmovd %esi, %k2
+; AVX512BW-NEXT: kshiftrw $8, %k2, %k0
+; AVX512BW-NEXT: kshiftrw $9, %k2, %k2
+; AVX512BW-NEXT: kshiftrw $6, %k1, %k3
+; AVX512BW-NEXT: kxorw %k2, %k3, %k2
+; AVX512BW-NEXT: kshiftlw $15, %k2, %k2
+; AVX512BW-NEXT: kshiftrw $9, %k2, %k2
; AVX512BW-NEXT: kxorw %k2, %k1, %k1
-; AVX512BW-NEXT: kshiftlw $15, %k1, %k1
-; AVX512BW-NEXT: kshiftrw $8, %k1, %k1
-; AVX512BW-NEXT: kxorw %k0, %k1, %k0
+; AVX512BW-NEXT: kshiftlw $9, %k1, %k1
+; AVX512BW-NEXT: kshiftrw $9, %k1, %k1
+; AVX512BW-NEXT: kshiftlw $7, %k0, %k0
+; AVX512BW-NEXT: korw %k0, %k1, %k0
; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
; AVX512BW-NEXT: ## kill: def %xmm0 killed %xmm0 killed %zmm0
; AVX512BW-NEXT: vzeroupper
@@ -1226,19 +1224,19 @@ define <8 x i1> @test18(i8 %a, i16 %y) {
;
; AVX512DQ-LABEL: test18:
; AVX512DQ: ## %bb.0:
-; AVX512DQ-NEXT: kmovw %edi, %k0
-; AVX512DQ-NEXT: kmovw %esi, %k1
-; AVX512DQ-NEXT: kshiftrw $8, %k1, %k2
-; AVX512DQ-NEXT: kshiftrw $9, %k1, %k1
-; AVX512DQ-NEXT: kshiftrb $6, %k0, %k3
-; AVX512DQ-NEXT: kxorb %k1, %k3, %k1
-; AVX512DQ-NEXT: kshiftlb $7, %k1, %k1
+; AVX512DQ-NEXT: kmovw %edi, %k1
+; AVX512DQ-NEXT: kmovw %esi, %k2
+; AVX512DQ-NEXT: kshiftrw $8, %k2, %k0
+; AVX512DQ-NEXT: kshiftrw $9, %k2, %k2
+; AVX512DQ-NEXT: kshiftrb $6, %k1, %k3
+; AVX512DQ-NEXT: kxorb %k2, %k3, %k2
+; AVX512DQ-NEXT: kshiftlb $7, %k2, %k2
+; AVX512DQ-NEXT: kshiftrb $1, %k2, %k2
+; AVX512DQ-NEXT: kxorb %k2, %k1, %k1
+; AVX512DQ-NEXT: kshiftlb $1, %k1, %k1
; AVX512DQ-NEXT: kshiftrb $1, %k1, %k1
-; AVX512DQ-NEXT: kxorb %k0, %k1, %k0
-; AVX512DQ-NEXT: kshiftlb $1, %k0, %k0
-; AVX512DQ-NEXT: kshiftrb $1, %k0, %k0
-; AVX512DQ-NEXT: kshiftlb $7, %k2, %k1
-; AVX512DQ-NEXT: korb %k1, %k0, %k0
+; AVX512DQ-NEXT: kshiftlb $7, %k0, %k0
+; AVX512DQ-NEXT: korb %k0, %k1, %k0
; AVX512DQ-NEXT: vpmovm2d %k0, %zmm0
; AVX512DQ-NEXT: vpmovdw %zmm0, %ymm0
; AVX512DQ-NEXT: ## kill: def %xmm0 killed %xmm0 killed %ymm0
Modified: llvm/trunk/test/CodeGen/X86/avx512-schedule.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-schedule.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-schedule.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-schedule.ll Tue Jan 23 07:56:36 2018
@@ -7305,7 +7305,7 @@ define <64 x i8> @vmov_test16(i64 %x) {
; GENERIC-NEXT: kxorq %k1, %k2, %k1 # sched: [1:1.00]
; GENERIC-NEXT: kshiftlq $63, %k1, %k1 # sched: [1:1.00]
; GENERIC-NEXT: kshiftrq $58, %k1, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: kxorq %k0, %k1, %k0 # sched: [1:1.00]
+; GENERIC-NEXT: kxorq %k1, %k0, %k0 # sched: [1:1.00]
; GENERIC-NEXT: vpmovm2b %k0, %zmm0 # sched: [1:0.33]
; GENERIC-NEXT: retq # sched: [1:1.00]
;
@@ -7318,7 +7318,7 @@ define <64 x i8> @vmov_test16(i64 %x) {
; SKX-NEXT: kxorq %k1, %k2, %k1 # sched: [1:1.00]
; SKX-NEXT: kshiftlq $63, %k1, %k1 # sched: [3:1.00]
; SKX-NEXT: kshiftrq $58, %k1, %k1 # sched: [3:1.00]
-; SKX-NEXT: kxorq %k0, %k1, %k0 # sched: [1:1.00]
+; SKX-NEXT: kxorq %k1, %k0, %k0 # sched: [1:1.00]
; SKX-NEXT: vpmovm2b %k0, %zmm0 # sched: [1:0.25]
; SKX-NEXT: retq # sched: [7:1.00]
%a = bitcast i64 %x to <64 x i1>
@@ -7339,7 +7339,7 @@ define <64 x i8> @vmov_test17(i64 %x, i3
; GENERIC-NEXT: kxorq %k1, %k2, %k1 # sched: [1:1.00]
; GENERIC-NEXT: kshiftlq $63, %k1, %k1 # sched: [1:1.00]
; GENERIC-NEXT: kshiftrq $58, %k1, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: kxorq %k0, %k1, %k0 # sched: [1:1.00]
+; GENERIC-NEXT: kxorq %k1, %k0, %k0 # sched: [1:1.00]
; GENERIC-NEXT: vpmovm2b %k0, %zmm0 # sched: [1:0.33]
; GENERIC-NEXT: retq # sched: [1:1.00]
;
@@ -7353,7 +7353,7 @@ define <64 x i8> @vmov_test17(i64 %x, i3
; SKX-NEXT: kxorq %k1, %k2, %k1 # sched: [1:1.00]
; SKX-NEXT: kshiftlq $63, %k1, %k1 # sched: [3:1.00]
; SKX-NEXT: kshiftrq $58, %k1, %k1 # sched: [3:1.00]
-; SKX-NEXT: kxorq %k0, %k1, %k0 # sched: [1:1.00]
+; SKX-NEXT: kxorq %k1, %k0, %k0 # sched: [1:1.00]
; SKX-NEXT: vpmovm2b %k0, %zmm0 # sched: [1:0.25]
; SKX-NEXT: retq # sched: [7:1.00]
%a = bitcast i64 %x to <64 x i1>
@@ -7366,37 +7366,37 @@ define <64 x i8> @vmov_test17(i64 %x, i3
define <8 x i1> @vmov_test18(i8 %a, i16 %y) {
; GENERIC-LABEL: vmov_test18:
; GENERIC: # %bb.0:
-; GENERIC-NEXT: kmovd %edi, %k0 # sched: [1:0.33]
-; GENERIC-NEXT: kmovd %esi, %k1 # sched: [1:0.33]
-; GENERIC-NEXT: kshiftrw $8, %k1, %k2 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftrw $9, %k1, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftrb $6, %k0, %k3 # sched: [1:1.00]
-; GENERIC-NEXT: kxorb %k1, %k3, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftlb $7, %k1, %k1 # sched: [1:1.00]
+; GENERIC-NEXT: kmovd %edi, %k1 # sched: [1:0.33]
+; GENERIC-NEXT: kmovd %esi, %k2 # sched: [1:0.33]
+; GENERIC-NEXT: kshiftrw $8, %k2, %k0 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftrw $9, %k2, %k2 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftrb $6, %k1, %k3 # sched: [1:1.00]
+; GENERIC-NEXT: kxorb %k2, %k3, %k2 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftlb $7, %k2, %k2 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftrb $1, %k2, %k2 # sched: [1:1.00]
+; GENERIC-NEXT: kxorb %k2, %k1, %k1 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftlb $1, %k1, %k1 # sched: [1:1.00]
; GENERIC-NEXT: kshiftrb $1, %k1, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: kxorb %k0, %k1, %k0 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftlb $1, %k0, %k0 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftrb $1, %k0, %k0 # sched: [1:1.00]
-; GENERIC-NEXT: kshiftlb $7, %k2, %k1 # sched: [1:1.00]
-; GENERIC-NEXT: korb %k1, %k0, %k0 # sched: [1:1.00]
+; GENERIC-NEXT: kshiftlb $7, %k0, %k0 # sched: [1:1.00]
+; GENERIC-NEXT: korb %k0, %k1, %k0 # sched: [1:1.00]
; GENERIC-NEXT: vpmovm2w %k0, %xmm0 # sched: [1:0.33]
; GENERIC-NEXT: retq # sched: [1:1.00]
;
; SKX-LABEL: vmov_test18:
; SKX: # %bb.0:
-; SKX-NEXT: kmovd %edi, %k0 # sched: [1:1.00]
-; SKX-NEXT: kmovd %esi, %k1 # sched: [1:1.00]
-; SKX-NEXT: kshiftrw $8, %k1, %k2 # sched: [3:1.00]
-; SKX-NEXT: kshiftrw $9, %k1, %k1 # sched: [3:1.00]
-; SKX-NEXT: kshiftrb $6, %k0, %k3 # sched: [3:1.00]
-; SKX-NEXT: kxorb %k1, %k3, %k1 # sched: [1:1.00]
-; SKX-NEXT: kshiftlb $7, %k1, %k1 # sched: [3:1.00]
+; SKX-NEXT: kmovd %edi, %k1 # sched: [1:1.00]
+; SKX-NEXT: kmovd %esi, %k2 # sched: [1:1.00]
+; SKX-NEXT: kshiftrw $8, %k2, %k0 # sched: [3:1.00]
+; SKX-NEXT: kshiftrw $9, %k2, %k2 # sched: [3:1.00]
+; SKX-NEXT: kshiftrb $6, %k1, %k3 # sched: [3:1.00]
+; SKX-NEXT: kxorb %k2, %k3, %k2 # sched: [1:1.00]
+; SKX-NEXT: kshiftlb $7, %k2, %k2 # sched: [3:1.00]
+; SKX-NEXT: kshiftrb $1, %k2, %k2 # sched: [3:1.00]
+; SKX-NEXT: kxorb %k2, %k1, %k1 # sched: [1:1.00]
+; SKX-NEXT: kshiftlb $1, %k1, %k1 # sched: [3:1.00]
; SKX-NEXT: kshiftrb $1, %k1, %k1 # sched: [3:1.00]
-; SKX-NEXT: kxorb %k0, %k1, %k0 # sched: [1:1.00]
-; SKX-NEXT: kshiftlb $1, %k0, %k0 # sched: [3:1.00]
-; SKX-NEXT: kshiftrb $1, %k0, %k0 # sched: [3:1.00]
-; SKX-NEXT: kshiftlb $7, %k2, %k1 # sched: [3:1.00]
-; SKX-NEXT: korb %k1, %k0, %k0 # sched: [1:1.00]
+; SKX-NEXT: kshiftlb $7, %k0, %k0 # sched: [3:1.00]
+; SKX-NEXT: korb %k0, %k1, %k0 # sched: [1:1.00]
; SKX-NEXT: vpmovm2w %k0, %xmm0 # sched: [1:0.25]
; SKX-NEXT: retq # sched: [7:1.00]
%b = bitcast i8 %a to <8 x i1>
Modified: llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-fast-isel.ll Tue Jan 23 07:56:36 2018
@@ -122,7 +122,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $62, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $2, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: andb $15, %cl
@@ -132,14 +132,14 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $61, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $3, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $60, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $4, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -147,7 +147,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $59, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $5, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -156,7 +156,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $58, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $6, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -164,7 +164,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $57, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $7, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $7, %cl
@@ -172,14 +172,14 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $56, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $55, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $9, %k0, %k1
; X32-NEXT: andb $2, %cl
; X32-NEXT: shrb %cl
@@ -187,7 +187,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $54, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $10, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: movl %eax, %esi
@@ -222,57 +222,57 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $53, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $11, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $52, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $12, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $51, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $13, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $50, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $14, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $49, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $15, %k0, %k1
; X32-NEXT: shrl $15, %edx
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $48, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $16, %k0, %k1
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $47, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $17, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $46, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $18, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $45, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $19, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $44, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $20, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -280,7 +280,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $43, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $21, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -289,7 +289,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $42, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $22, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -297,14 +297,14 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $41, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $23, %k0, %k1
; X32-NEXT: shrb $7, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $40, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $24, %k0, %k1
; X32-NEXT: movl %esi, %edx
; X32-NEXT: shrl $24, %edx
@@ -312,7 +312,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $39, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $25, %k0, %k1
; X32-NEXT: movl %edx, %eax
; X32-NEXT: andb $2, %al
@@ -321,7 +321,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $38, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $26, %k0, %k1
; X32-NEXT: andb $15, %dl
; X32-NEXT: movl %edx, %eax
@@ -330,14 +330,14 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $37, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $27, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $36, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $28, %k0, %k1
; X32-NEXT: movl %esi, %ecx
; X32-NEXT: movl %ecx, %eax
@@ -346,7 +346,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $35, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $29, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $29, %eax
@@ -355,7 +355,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $34, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $30, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $30, %eax
@@ -363,20 +363,20 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $33, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $31, %k0, %k1
; X32-NEXT: shrl $31, %ecx
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $32, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $32, %k0, %k1
; X32-NEXT: kmovd %ebx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $31, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $33, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $2, %al
@@ -385,7 +385,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $30, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $34, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $15, %al
@@ -395,14 +395,14 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $29, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $35, %k0, %k1
; X32-NEXT: shrb $3, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $28, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $36, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $4, %al
@@ -410,7 +410,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $27, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $37, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $5, %al
@@ -419,7 +419,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $26, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $38, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $6, %al
@@ -427,7 +427,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $25, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $39, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $7, %al
@@ -435,7 +435,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $24, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $40, %k0, %k1
; X32-NEXT: movb %bh, %al
; X32-NEXT: kmovd %eax, %k2
@@ -467,12 +467,12 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $23, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $41, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $22, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $42, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: movzwl %bx, %eax
@@ -484,57 +484,57 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kmovd %esi, %k3
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $21, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $43, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: shrl $15, %eax
; X32-NEXT: kmovd %eax, %k4
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $20, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $44, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $19, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $45, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $18, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $46, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $17, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $47, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $16, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $48, %k0, %k1
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $15, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $49, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $14, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $50, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $13, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $51, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $12, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k4
+; X32-NEXT: kxorq %k1, %k0, %k4
; X32-NEXT: kshiftrq $52, %k4, %k0
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrb $4, %al
@@ -562,12 +562,12 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kmovd %eax, %k3
; X32-NEXT: kshiftlq $63, %k5, %k5
; X32-NEXT: kshiftrq $11, %k5, %k5
-; X32-NEXT: kxorq %k4, %k5, %k4
+; X32-NEXT: kxorq %k5, %k4, %k4
; X32-NEXT: kshiftrq $53, %k4, %k5
; X32-NEXT: kxorq %k6, %k5, %k5
; X32-NEXT: kshiftlq $63, %k5, %k5
; X32-NEXT: kshiftrq $10, %k5, %k5
-; X32-NEXT: kxorq %k4, %k5, %k5
+; X32-NEXT: kxorq %k5, %k4, %k5
; X32-NEXT: kshiftrq $54, %k5, %k4
; X32-NEXT: kxorq %k7, %k4, %k6
; X32-NEXT: shrb $3, %cl
@@ -578,12 +578,12 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k6, %k6
; X32-NEXT: kshiftrq $9, %k6, %k6
-; X32-NEXT: kxorq %k5, %k6, %k5
+; X32-NEXT: kxorq %k6, %k5, %k5
; X32-NEXT: kshiftrq $55, %k5, %k6
; X32-NEXT: kxorq %k0, %k6, %k0
; X32-NEXT: kshiftlq $63, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k0
-; X32-NEXT: kxorq %k5, %k0, %k0
+; X32-NEXT: kxorq %k0, %k5, %k0
; X32-NEXT: kshiftrq $56, %k0, %k5
; X32-NEXT: kxorq %k1, %k5, %k1
; X32-NEXT: movl %ebx, %eax
@@ -594,32 +594,32 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: kmovd %eax, %k6
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $7, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $57, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $6, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $58, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $5, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $59, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $4, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $60, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $3, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $61, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $2, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $62, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: shrl $31, %ebx
@@ -627,7 +627,7 @@ define <8 x i64> @test_mm512_mask_set1_e
; X32-NEXT: movb {{[0-9]+}}(%esp), %al
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $1, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftlq $1, %k0, %k0
; X32-NEXT: kshiftrq $1, %k0, %k0
; X32-NEXT: kshiftlq $63, %k2, %k1
@@ -671,7 +671,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $62, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $2, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: andb $15, %cl
@@ -681,14 +681,14 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $61, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $3, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $60, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $4, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -696,7 +696,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $59, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $5, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -705,7 +705,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $58, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $6, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -713,7 +713,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $57, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $7, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $7, %cl
@@ -721,14 +721,14 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $56, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $55, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $9, %k0, %k1
; X32-NEXT: andb $2, %cl
; X32-NEXT: shrb %cl
@@ -736,7 +736,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $54, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $10, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: movl %eax, %esi
@@ -771,57 +771,57 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: movl {{[0-9]+}}(%esp), %ebx
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $53, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $11, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $52, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $12, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $51, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $13, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $50, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $14, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $49, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $15, %k0, %k1
; X32-NEXT: shrl $15, %edx
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $48, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $16, %k0, %k1
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $47, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $17, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $46, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $18, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $45, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $19, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $44, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $20, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -829,7 +829,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $43, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $21, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -838,7 +838,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $42, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $22, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -846,14 +846,14 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $41, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $23, %k0, %k1
; X32-NEXT: shrb $7, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $40, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $24, %k0, %k1
; X32-NEXT: movl %esi, %edx
; X32-NEXT: shrl $24, %edx
@@ -861,7 +861,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $39, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $25, %k0, %k1
; X32-NEXT: movl %edx, %eax
; X32-NEXT: andb $2, %al
@@ -870,7 +870,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $38, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $26, %k0, %k1
; X32-NEXT: andb $15, %dl
; X32-NEXT: movl %edx, %eax
@@ -879,14 +879,14 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $37, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $27, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $36, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $28, %k0, %k1
; X32-NEXT: movl %esi, %ecx
; X32-NEXT: movl %ecx, %eax
@@ -895,7 +895,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $35, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $29, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $29, %eax
@@ -904,7 +904,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $34, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $30, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $30, %eax
@@ -912,20 +912,20 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $33, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $31, %k0, %k1
; X32-NEXT: shrl $31, %ecx
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $32, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $32, %k0, %k1
; X32-NEXT: kmovd %ebx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $31, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $33, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $2, %al
@@ -934,7 +934,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $30, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $34, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $15, %al
@@ -944,14 +944,14 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $29, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $35, %k0, %k1
; X32-NEXT: shrb $3, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $28, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $36, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $4, %al
@@ -959,7 +959,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $27, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $37, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $5, %al
@@ -968,7 +968,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $26, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $38, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $6, %al
@@ -976,7 +976,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $25, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $39, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $7, %al
@@ -984,7 +984,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $24, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $40, %k0, %k1
; X32-NEXT: movb %bh, %al
; X32-NEXT: kmovd %eax, %k2
@@ -1016,12 +1016,12 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $23, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $41, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $22, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $42, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: movzwl %bx, %eax
@@ -1033,57 +1033,57 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kmovd %esi, %k3
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $21, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $43, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: shrl $15, %eax
; X32-NEXT: kmovd %eax, %k4
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $20, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $44, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $19, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $45, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $18, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $46, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $17, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $47, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $16, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $48, %k0, %k1
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $15, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $49, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $14, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $50, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $13, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $51, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $12, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k4
+; X32-NEXT: kxorq %k1, %k0, %k4
; X32-NEXT: kshiftrq $52, %k4, %k0
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrb $4, %al
@@ -1111,12 +1111,12 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kmovd %eax, %k3
; X32-NEXT: kshiftlq $63, %k5, %k5
; X32-NEXT: kshiftrq $11, %k5, %k5
-; X32-NEXT: kxorq %k4, %k5, %k4
+; X32-NEXT: kxorq %k5, %k4, %k4
; X32-NEXT: kshiftrq $53, %k4, %k5
; X32-NEXT: kxorq %k6, %k5, %k5
; X32-NEXT: kshiftlq $63, %k5, %k5
; X32-NEXT: kshiftrq $10, %k5, %k5
-; X32-NEXT: kxorq %k4, %k5, %k5
+; X32-NEXT: kxorq %k5, %k4, %k5
; X32-NEXT: kshiftrq $54, %k5, %k4
; X32-NEXT: kxorq %k7, %k4, %k6
; X32-NEXT: shrb $3, %cl
@@ -1127,12 +1127,12 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k6, %k6
; X32-NEXT: kshiftrq $9, %k6, %k6
-; X32-NEXT: kxorq %k5, %k6, %k5
+; X32-NEXT: kxorq %k6, %k5, %k5
; X32-NEXT: kshiftrq $55, %k5, %k6
; X32-NEXT: kxorq %k0, %k6, %k0
; X32-NEXT: kshiftlq $63, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k0
-; X32-NEXT: kxorq %k5, %k0, %k0
+; X32-NEXT: kxorq %k0, %k5, %k0
; X32-NEXT: kshiftrq $56, %k0, %k5
; X32-NEXT: kxorq %k1, %k5, %k1
; X32-NEXT: movl %ebx, %eax
@@ -1143,32 +1143,32 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: kmovd %eax, %k6
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $7, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $57, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $6, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $58, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $5, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $59, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $4, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $60, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $3, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $61, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $2, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $62, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: shrl $31, %ebx
@@ -1176,7 +1176,7 @@ define <8 x i64> @test_mm512_maskz_set1_
; X32-NEXT: movb {{[0-9]+}}(%esp), %al
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $1, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftlq $1, %k0, %k0
; X32-NEXT: kshiftrq $1, %k0, %k0
; X32-NEXT: kshiftlq $63, %k2, %k1
@@ -1701,7 +1701,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $62, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $2, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: andb $15, %cl
@@ -1711,14 +1711,14 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $61, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $3, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $60, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $4, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -1726,7 +1726,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $59, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $5, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -1735,7 +1735,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $58, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $6, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -1743,7 +1743,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $57, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $7, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $7, %cl
@@ -1751,14 +1751,14 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $56, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $55, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $9, %k0, %k1
; X32-NEXT: andb $2, %cl
; X32-NEXT: shrb %cl
@@ -1766,7 +1766,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $54, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $10, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: movl %eax, %esi
@@ -1801,57 +1801,57 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: movl 12(%ebp), %ebx
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $53, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $11, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $52, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $12, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $51, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $13, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $50, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $14, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $49, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $15, %k0, %k1
; X32-NEXT: shrl $15, %edx
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $48, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $16, %k0, %k1
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $47, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $17, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $46, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $18, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $45, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $19, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $44, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $20, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -1859,7 +1859,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $43, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $21, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -1868,7 +1868,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $42, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $22, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -1876,14 +1876,14 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $41, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $23, %k0, %k1
; X32-NEXT: shrb $7, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $40, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $24, %k0, %k1
; X32-NEXT: movl %esi, %edx
; X32-NEXT: shrl $24, %edx
@@ -1891,7 +1891,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $39, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $25, %k0, %k1
; X32-NEXT: movl %edx, %eax
; X32-NEXT: andb $2, %al
@@ -1900,7 +1900,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $38, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $26, %k0, %k1
; X32-NEXT: andb $15, %dl
; X32-NEXT: movl %edx, %eax
@@ -1909,14 +1909,14 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $37, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $27, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $36, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $28, %k0, %k1
; X32-NEXT: movl %esi, %ecx
; X32-NEXT: movl %ecx, %eax
@@ -1925,7 +1925,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $35, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $29, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $29, %eax
@@ -1934,7 +1934,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $34, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $30, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $30, %eax
@@ -1942,20 +1942,20 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $33, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $31, %k0, %k1
; X32-NEXT: shrl $31, %ecx
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $32, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $32, %k0, %k1
; X32-NEXT: kmovd %ebx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $31, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $33, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $2, %al
@@ -1964,7 +1964,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $30, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $34, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $15, %al
@@ -1974,14 +1974,14 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $29, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $35, %k0, %k1
; X32-NEXT: shrb $3, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $28, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $36, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $4, %al
@@ -1989,7 +1989,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $27, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $37, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $5, %al
@@ -1998,7 +1998,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $26, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $38, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $6, %al
@@ -2006,7 +2006,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $25, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $39, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $7, %al
@@ -2014,7 +2014,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $24, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $40, %k0, %k1
; X32-NEXT: movb %bh, %al
; X32-NEXT: kmovd %eax, %k2
@@ -2046,22 +2046,22 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $23, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $41, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $22, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $42, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $21, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $43, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $20, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $44, %k0, %k1
; X32-NEXT: movzwl %bx, %eax
; X32-NEXT: movl %eax, %esi
@@ -2070,12 +2070,12 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $19, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $45, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $18, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $46, %k0, %k1
; X32-NEXT: movl %eax, %esi
; X32-NEXT: shrl $14, %esi
@@ -2083,37 +2083,37 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $17, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $47, %k0, %k1
; X32-NEXT: shrl $15, %eax
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $16, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $48, %k0, %k1
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $15, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $49, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $14, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $50, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $13, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $51, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $12, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $52, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrb $4, %al
@@ -2141,44 +2141,44 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $11, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $53, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $10, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $54, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $9, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $55, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $8, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $56, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $7, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $57, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $6, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $58, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $5, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $59, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $4, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $60, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $28, %eax
@@ -2186,7 +2186,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $3, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $61, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $29, %eax
@@ -2195,7 +2195,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $2, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $62, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $30, %eax
@@ -2203,7 +2203,7 @@ define i64 @test_mm512_mask_test_epi8_ma
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $1, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftlq $1, %k0, %k0
; X32-NEXT: kshiftrq $1, %k0, %k0
; X32-NEXT: shrl $31, %ebx
@@ -2343,7 +2343,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $62, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $2, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: andb $15, %cl
@@ -2353,14 +2353,14 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $61, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $3, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $60, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $4, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -2368,7 +2368,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $59, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $5, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -2377,7 +2377,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $58, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $6, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -2385,7 +2385,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $57, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $7, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $7, %cl
@@ -2393,14 +2393,14 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $56, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $8, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $55, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $9, %k0, %k1
; X32-NEXT: andb $2, %cl
; X32-NEXT: shrb %cl
@@ -2408,7 +2408,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $54, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $10, %k0, %k1
; X32-NEXT: movb %ah, %cl
; X32-NEXT: movl %eax, %esi
@@ -2443,57 +2443,57 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: movl 12(%ebp), %ebx
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $53, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $11, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $52, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $12, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $51, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $13, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $50, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $14, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $49, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $15, %k0, %k1
; X32-NEXT: shrl $15, %edx
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $48, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $16, %k0, %k1
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $47, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $17, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $46, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $18, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $45, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $19, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $44, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $20, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $4, %cl
@@ -2501,7 +2501,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $43, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $21, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $5, %cl
@@ -2510,7 +2510,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $42, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $22, %k0, %k1
; X32-NEXT: movl %eax, %ecx
; X32-NEXT: shrb $6, %cl
@@ -2518,14 +2518,14 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $41, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $23, %k0, %k1
; X32-NEXT: shrb $7, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $40, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $24, %k0, %k1
; X32-NEXT: movl %esi, %edx
; X32-NEXT: shrl $24, %edx
@@ -2533,7 +2533,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $39, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $25, %k0, %k1
; X32-NEXT: movl %edx, %eax
; X32-NEXT: andb $2, %al
@@ -2542,7 +2542,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $38, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $26, %k0, %k1
; X32-NEXT: andb $15, %dl
; X32-NEXT: movl %edx, %eax
@@ -2551,14 +2551,14 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $37, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $27, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $36, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $28, %k0, %k1
; X32-NEXT: movl %esi, %ecx
; X32-NEXT: movl %ecx, %eax
@@ -2567,7 +2567,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $35, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $29, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $29, %eax
@@ -2576,7 +2576,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $34, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $30, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrl $30, %eax
@@ -2584,20 +2584,20 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $33, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $31, %k0, %k1
; X32-NEXT: shrl $31, %ecx
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $32, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $32, %k0, %k1
; X32-NEXT: kmovd %ebx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $31, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $33, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $2, %al
@@ -2606,7 +2606,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $30, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $34, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: andb $15, %al
@@ -2616,14 +2616,14 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $29, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $35, %k0, %k1
; X32-NEXT: shrb $3, %al
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $28, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $36, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $4, %al
@@ -2631,7 +2631,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $27, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $37, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $5, %al
@@ -2640,7 +2640,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $26, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $38, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $6, %al
@@ -2648,7 +2648,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $25, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $39, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrb $7, %al
@@ -2656,7 +2656,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $24, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $40, %k0, %k1
; X32-NEXT: movb %bh, %al
; X32-NEXT: kmovd %eax, %k2
@@ -2688,22 +2688,22 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $23, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $41, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $22, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $42, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $21, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $43, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $20, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $44, %k0, %k1
; X32-NEXT: movzwl %bx, %eax
; X32-NEXT: movl %eax, %esi
@@ -2712,12 +2712,12 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $19, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $45, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $18, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $46, %k0, %k1
; X32-NEXT: movl %eax, %esi
; X32-NEXT: shrl $14, %esi
@@ -2725,37 +2725,37 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $17, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $47, %k0, %k1
; X32-NEXT: shrl $15, %eax
; X32-NEXT: kmovd %eax, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $16, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $48, %k0, %k1
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $15, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $49, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $14, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $50, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $13, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $51, %k0, %k1
; X32-NEXT: shrb $3, %dl
; X32-NEXT: kmovd %edx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $12, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $52, %k0, %k1
; X32-NEXT: movl %ecx, %eax
; X32-NEXT: shrb $4, %al
@@ -2783,44 +2783,44 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kmovd %eax, %k7
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $11, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $53, %k0, %k1
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $10, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $54, %k0, %k1
; X32-NEXT: kxorq %k3, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $9, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $55, %k0, %k1
; X32-NEXT: kxorq %k4, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $8, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $56, %k0, %k1
; X32-NEXT: kxorq %k5, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $7, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $57, %k0, %k1
; X32-NEXT: kxorq %k6, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $6, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $58, %k0, %k1
; X32-NEXT: kxorq %k7, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $5, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $59, %k0, %k1
; X32-NEXT: shrb $3, %cl
; X32-NEXT: kmovd %ecx, %k2
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $4, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $60, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $28, %eax
@@ -2828,7 +2828,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $3, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $61, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $29, %eax
@@ -2837,7 +2837,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $2, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftrq $62, %k0, %k1
; X32-NEXT: movl %ebx, %eax
; X32-NEXT: shrl $30, %eax
@@ -2845,7 +2845,7 @@ define i64 @test_mm512_mask_testn_epi8_m
; X32-NEXT: kxorq %k2, %k1, %k1
; X32-NEXT: kshiftlq $63, %k1, %k1
; X32-NEXT: kshiftrq $1, %k1, %k1
-; X32-NEXT: kxorq %k0, %k1, %k0
+; X32-NEXT: kxorq %k1, %k0, %k0
; X32-NEXT: kshiftlq $1, %k0, %k0
; X32-NEXT: kshiftrq $1, %k0, %k0
; X32-NEXT: shrl $31, %ebx
Modified: llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll?rev=323212&r1=323211&r2=323212&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll Tue Jan 23 07:56:36 2018
@@ -1844,7 +1844,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $62, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k5, %k1, %k7
+; AVX512F-32-NEXT: kxorq %k1, %k5, %k7
; AVX512F-32-NEXT: kshiftrq $2, %k7, %k1
; AVX512F-32-NEXT: kxorq %k2, %k1, %k2
; AVX512F-32-NEXT: kmovd %ecx, %k5
@@ -1855,7 +1855,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $61, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
+; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $3, %k7, %k2
; AVX512F-32-NEXT: kxorq %k0, %k2, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -1863,7 +1863,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $60, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k7, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftrq $4, %k0, %k7
; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k3
@@ -1872,7 +1872,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $59, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k0, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
; AVX512F-32-NEXT: kshiftrq $5, %k7, %k0
; AVX512F-32-NEXT: kxorq %k4, %k0, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -1881,7 +1881,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $58, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k7, %k4, %k7
+; AVX512F-32-NEXT: kxorq %k4, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $6, %k7, %k4
; AVX512F-32-NEXT: kxorq %k6, %k4, %k6
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -1890,7 +1890,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %bl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $57, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
; AVX512F-32-NEXT: kshiftrq $7, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %ebx, %k5
@@ -1898,7 +1898,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $56, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k7
; AVX512F-32-NEXT: kshiftrq $8, %k7, %k6
; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kmovd %edx, %k6
@@ -1906,7 +1906,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $55, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k7, %k1, %k7
+; AVX512F-32-NEXT: kxorq %k1, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $9, %k7, %k1
; AVX512F-32-NEXT: kxorq %k2, %k1, %k2
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -1914,7 +1914,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $54, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
+; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $10, %k7, %k2
; AVX512F-32-NEXT: kxorq %k3, %k2, %k3
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -1927,12 +1927,12 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $12, %edx
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $53, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k7, %k3, %k3
+; AVX512F-32-NEXT: kxorq %k3, %k7, %k3
; AVX512F-32-NEXT: kshiftrq $11, %k3, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $52, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
+; AVX512F-32-NEXT: kxorq %k0, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $12, %k3, %k0
; AVX512F-32-NEXT: kmovd %edx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
@@ -1943,24 +1943,24 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $14, %edi
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $51, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k3, %k7, %k3
+; AVX512F-32-NEXT: kxorq %k7, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $13, %k3, %k7
; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $50, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $14, %k3, %k4
; AVX512F-32-NEXT: kmovd %edi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $49, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $15, %k3, %k4
; AVX512F-32-NEXT: kmovd %esi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $48, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $16, %k3, %k4
; AVX512F-32-NEXT: kmovd %eax, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
@@ -1971,14 +1971,14 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $7, %al
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $47, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $17, %k3, %k4
; AVX512F-32-NEXT: kxorq %k5, %k4, %k4
; AVX512F-32-NEXT: kmovd %eax, %k5
; AVX512F-32-NEXT: movl %edx, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $46, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k4
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k4
; AVX512F-32-NEXT: kshiftrq $18, %k4, %k3
; AVX512F-32-NEXT: kxorq %k6, %k3, %k6
; AVX512F-32-NEXT: kmovd %edx, %k3
@@ -1988,7 +1988,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %al
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $45, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k4, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k4, %k6
; AVX512F-32-NEXT: kshiftrq $19, %k6, %k4
; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
; AVX512F-32-NEXT: kmovd %eax, %k4
@@ -1996,7 +1996,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $44, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k6, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kshiftrq $20, %k1, %k6
; AVX512F-32-NEXT: kxorq %k2, %k6, %k6
; AVX512F-32-NEXT: kmovd %edx, %k2
@@ -2005,7 +2005,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $43, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k6
; AVX512F-32-NEXT: kshiftrq $21, %k6, %k1
; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -2014,7 +2014,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $42, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k6, %k0, %k6
+; AVX512F-32-NEXT: kxorq %k0, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $22, %k6, %k0
; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -2023,7 +2023,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $41, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $23, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k5
@@ -2031,7 +2031,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $40, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k7
; AVX512F-32-NEXT: kshiftrq $24, %k7, %k6
; AVX512F-32-NEXT: kxorq %k3, %k6, %k3
; AVX512F-32-NEXT: kmovd %eax, %k6
@@ -2040,7 +2040,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $39, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k7, %k3, %k7
+; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $25, %k7, %k3
; AVX512F-32-NEXT: kxorq %k4, %k3, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k3
@@ -2048,7 +2048,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $38, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
+; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
; AVX512F-32-NEXT: kshiftrq $26, %k4, %k7
; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -2059,12 +2059,12 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $28, %edx
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $37, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
+; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $27, %k4, %k7
; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $36, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
; AVX512F-32-NEXT: kshiftrq $28, %k1, %k4
; AVX512F-32-NEXT: kmovd %edx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k7
@@ -2077,24 +2077,24 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $30, %esi
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $35, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
+; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $29, %k1, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $34, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kshiftrq $30, %k0, %k1
; AVX512F-32-NEXT: kmovd %esi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $33, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $31, %k0, %k1
; AVX512F-32-NEXT: kmovd %ecx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $32, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $32, %k0, %k1
; AVX512F-32-NEXT: kmovd %ebx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
@@ -2103,12 +2103,12 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $7, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $31, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $33, %k0, %k1
; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $30, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $34, %k0, %k1
; AVX512F-32-NEXT: kxorq %k6, %k1, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k6
@@ -2118,7 +2118,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $29, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k0, %k5, %k5
+; AVX512F-32-NEXT: kxorq %k5, %k0, %k5
; AVX512F-32-NEXT: kshiftrq $35, %k5, %k0
; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -2126,7 +2126,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %al
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $28, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k5, %k3, %k5
+; AVX512F-32-NEXT: kxorq %k3, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $36, %k5, %k3
; AVX512F-32-NEXT: kxorq %k2, %k3, %k2
; AVX512F-32-NEXT: kmovd %eax, %k3
@@ -2135,7 +2135,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $27, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k5, %k2, %k2
+; AVX512F-32-NEXT: kxorq %k2, %k5, %k2
; AVX512F-32-NEXT: kshiftrq $37, %k2, %k5
; AVX512F-32-NEXT: kxorq %k4, %k5, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -2144,7 +2144,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $26, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k2, %k5, %k2
+; AVX512F-32-NEXT: kxorq %k5, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $38, %k2, %k5
; AVX512F-32-NEXT: kxorq %k7, %k5, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k5
@@ -2153,7 +2153,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $25, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
; AVX512F-32-NEXT: kshiftrq $39, %k7, %k2
; AVX512F-32-NEXT: kxorq %k6, %k2, %k6
; AVX512F-32-NEXT: kmovd %edx, %k2
@@ -2163,7 +2163,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $24, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
; AVX512F-32-NEXT: kshiftrq $40, %k6, %k7
; AVX512F-32-NEXT: kxorq %k1, %k7, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -2174,28 +2174,28 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $12, %ecx
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $23, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $41, %k6, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k1
; AVX512F-32-NEXT: shrl $14, %edi
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $22, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k6, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k6, %k0
; AVX512F-32-NEXT: kshiftrq $42, %k0, %k6
; AVX512F-32-NEXT: kxorq %k3, %k6, %k3
; AVX512F-32-NEXT: kmovd %edi, %k7
; AVX512F-32-NEXT: shrl $15, %esi
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $21, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k0, %k3, %k0
+; AVX512F-32-NEXT: kxorq %k3, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $43, %k0, %k3
; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kmovd %esi, %k6
; AVX512F-32-NEXT: shrb $3, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $20, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k0, %k3, %k3
+; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
; AVX512F-32-NEXT: kshiftrq $44, %k3, %k0
; AVX512F-32-NEXT: kxorq %k1, %k0, %k1
; AVX512F-32-NEXT: kmovd %edx, %k0
@@ -2203,7 +2203,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $19, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k3, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k3, %k1
; AVX512F-32-NEXT: kshiftrq $45, %k1, %k3
; AVX512F-32-NEXT: kxorq %k5, %k3, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k3
@@ -2212,7 +2212,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $18, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
+; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $46, %k1, %k4
; AVX512F-32-NEXT: kxorq %k7, %k4, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -2220,12 +2220,12 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $6, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $17, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k1, %k5, %k1
+; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $47, %k1, %k5
; AVX512F-32-NEXT: kxorq %k6, %k5, %k5
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $16, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k1, %k5, %k1
+; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $48, %k1, %k5
; AVX512F-32-NEXT: kmovd %eax, %k6
; AVX512F-32-NEXT: kxorq %k6, %k5, %k6
@@ -2236,14 +2236,14 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $7, %al
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $15, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $49, %k1, %k6
; AVX512F-32-NEXT: kxorq %k2, %k6, %k6
; AVX512F-32-NEXT: kmovd %eax, %k2
; AVX512F-32-NEXT: movl %edx, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $14, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k6
; AVX512F-32-NEXT: kshiftrq $50, %k6, %k1
; AVX512F-32-NEXT: kmovq {{[0-9]+}}(%esp), %k7 # 8-byte Reload
; AVX512F-32-NEXT: kxorq %k7, %k1, %k7
@@ -2254,7 +2254,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $13, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $51, %k6, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k0
@@ -2262,14 +2262,14 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $12, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $52, %k6, %k7
; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k3
; AVX512F-32-NEXT: shrb $3, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $11, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $53, %k6, %k7
; AVX512F-32-NEXT: kxorq %k4, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k4
@@ -2278,40 +2278,40 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: andb $1, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $10, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $54, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k5
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $9, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $55, %k6, %k7
; AVX512F-32-NEXT: kxorq %k2, %k7, %k2
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $8, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k6, %k2, %k2
+; AVX512F-32-NEXT: kxorq %k2, %k6, %k2
; AVX512F-32-NEXT: kshiftrq $56, %k2, %k6
; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $7, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k2, %k1
; AVX512F-32-NEXT: kshiftrq $57, %k1, %k2
; AVX512F-32-NEXT: kxorq %k0, %k2, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $6, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kshiftrq $58, %k0, %k1
; AVX512F-32-NEXT: kxorq %k3, %k1, %k1
; AVX512F-32-NEXT: movl %ebx, %eax
; AVX512F-32-NEXT: shrl $28, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $5, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $59, %k0, %k1
; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $4, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $60, %k0, %k1
; AVX512F-32-NEXT: kmovd %eax, %k2
; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
@@ -2321,18 +2321,18 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: shrl $30, %ecx
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $3, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $61, %k0, %k1
; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $2, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $62, %k0, %k1
; AVX512F-32-NEXT: kmovd %ecx, %k2
; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $1, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftlq $1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $1, %k0, %k0
; AVX512F-32-NEXT: kmovd %eax, %k1
@@ -2544,7 +2544,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $62, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k5, %k1, %k7
+; AVX512F-32-NEXT: kxorq %k1, %k5, %k7
; AVX512F-32-NEXT: kshiftrq $2, %k7, %k1
; AVX512F-32-NEXT: kxorq %k2, %k1, %k2
; AVX512F-32-NEXT: kmovd %ecx, %k5
@@ -2555,7 +2555,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $61, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
+; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $3, %k7, %k2
; AVX512F-32-NEXT: kxorq %k0, %k2, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -2563,7 +2563,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $60, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k7, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftrq $4, %k0, %k7
; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k3
@@ -2572,7 +2572,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $59, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k0, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
; AVX512F-32-NEXT: kshiftrq $5, %k7, %k0
; AVX512F-32-NEXT: kxorq %k4, %k0, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -2581,7 +2581,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $58, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k7, %k4, %k7
+; AVX512F-32-NEXT: kxorq %k4, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $6, %k7, %k4
; AVX512F-32-NEXT: kxorq %k6, %k4, %k6
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -2590,7 +2590,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %bl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $57, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
; AVX512F-32-NEXT: kshiftrq $7, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %ebx, %k5
@@ -2598,7 +2598,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $56, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k7
; AVX512F-32-NEXT: kshiftrq $8, %k7, %k6
; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kmovd %edx, %k6
@@ -2606,7 +2606,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $55, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k7, %k1, %k7
+; AVX512F-32-NEXT: kxorq %k1, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $9, %k7, %k1
; AVX512F-32-NEXT: kxorq %k2, %k1, %k2
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -2614,7 +2614,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $54, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
+; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $10, %k7, %k2
; AVX512F-32-NEXT: kxorq %k3, %k2, %k3
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -2627,12 +2627,12 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $12, %edx
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $53, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k7, %k3, %k3
+; AVX512F-32-NEXT: kxorq %k3, %k7, %k3
; AVX512F-32-NEXT: kshiftrq $11, %k3, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $52, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
+; AVX512F-32-NEXT: kxorq %k0, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $12, %k3, %k0
; AVX512F-32-NEXT: kmovd %edx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
@@ -2643,24 +2643,24 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $14, %edi
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $51, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k3, %k7, %k3
+; AVX512F-32-NEXT: kxorq %k7, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $13, %k3, %k7
; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $50, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $14, %k3, %k4
; AVX512F-32-NEXT: kmovd %edi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $49, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $15, %k3, %k4
; AVX512F-32-NEXT: kmovd %esi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $48, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $16, %k3, %k4
; AVX512F-32-NEXT: kmovd %eax, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
@@ -2671,14 +2671,14 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $7, %al
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $47, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k3
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $17, %k3, %k4
; AVX512F-32-NEXT: kxorq %k5, %k4, %k4
; AVX512F-32-NEXT: kmovd %eax, %k5
; AVX512F-32-NEXT: movl %edx, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $46, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k3, %k4, %k4
+; AVX512F-32-NEXT: kxorq %k4, %k3, %k4
; AVX512F-32-NEXT: kshiftrq $18, %k4, %k3
; AVX512F-32-NEXT: kxorq %k6, %k3, %k6
; AVX512F-32-NEXT: kmovd %edx, %k3
@@ -2688,7 +2688,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %al
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $45, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k4, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k4, %k6
; AVX512F-32-NEXT: kshiftrq $19, %k6, %k4
; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
; AVX512F-32-NEXT: kmovd %eax, %k4
@@ -2696,7 +2696,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $44, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k6, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kshiftrq $20, %k1, %k6
; AVX512F-32-NEXT: kxorq %k2, %k6, %k6
; AVX512F-32-NEXT: kmovd %edx, %k2
@@ -2705,7 +2705,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $43, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k6
; AVX512F-32-NEXT: kshiftrq $21, %k6, %k1
; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -2714,7 +2714,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $42, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k6, %k0, %k6
+; AVX512F-32-NEXT: kxorq %k0, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $22, %k6, %k0
; AVX512F-32-NEXT: kxorq %k7, %k0, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -2723,7 +2723,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $41, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $23, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k5
@@ -2731,7 +2731,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $40, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k7
; AVX512F-32-NEXT: kshiftrq $24, %k7, %k6
; AVX512F-32-NEXT: kxorq %k3, %k6, %k3
; AVX512F-32-NEXT: kmovd %eax, %k6
@@ -2740,7 +2740,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $39, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k7, %k3, %k7
+; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $25, %k7, %k3
; AVX512F-32-NEXT: kxorq %k4, %k3, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k3
@@ -2748,7 +2748,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $38, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
+; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
; AVX512F-32-NEXT: kshiftrq $26, %k4, %k7
; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k2
@@ -2759,12 +2759,12 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $28, %edx
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $37, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k4, %k7, %k4
+; AVX512F-32-NEXT: kxorq %k7, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $27, %k4, %k7
; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $36, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
; AVX512F-32-NEXT: kshiftrq $28, %k1, %k4
; AVX512F-32-NEXT: kmovd %edx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k4, %k7
@@ -2777,24 +2777,24 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $30, %esi
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $35, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k1, %k7, %k1
+; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $29, %k1, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $34, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kshiftrq $30, %k0, %k1
; AVX512F-32-NEXT: kmovd %esi, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $33, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $31, %k0, %k1
; AVX512F-32-NEXT: kmovd %ecx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $32, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $32, %k0, %k1
; AVX512F-32-NEXT: kmovd %ebx, %k7
; AVX512F-32-NEXT: kxorq %k7, %k1, %k1
@@ -2803,12 +2803,12 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $7, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $31, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $33, %k0, %k1
; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $30, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $34, %k0, %k1
; AVX512F-32-NEXT: kxorq %k6, %k1, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k6
@@ -2818,7 +2818,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $29, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k0, %k5, %k5
+; AVX512F-32-NEXT: kxorq %k5, %k0, %k5
; AVX512F-32-NEXT: kshiftrq $35, %k5, %k0
; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
; AVX512F-32-NEXT: kmovd %ecx, %k0
@@ -2826,7 +2826,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %al
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $28, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k5, %k3, %k5
+; AVX512F-32-NEXT: kxorq %k3, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $36, %k5, %k3
; AVX512F-32-NEXT: kxorq %k2, %k3, %k2
; AVX512F-32-NEXT: kmovd %eax, %k3
@@ -2835,7 +2835,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $3, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $27, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k5, %k2, %k2
+; AVX512F-32-NEXT: kxorq %k2, %k5, %k2
; AVX512F-32-NEXT: kshiftrq $37, %k2, %k5
; AVX512F-32-NEXT: kxorq %k4, %k5, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -2844,7 +2844,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $26, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k2, %k5, %k2
+; AVX512F-32-NEXT: kxorq %k5, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $38, %k2, %k5
; AVX512F-32-NEXT: kxorq %k7, %k5, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k5
@@ -2853,7 +2853,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $25, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k2, %k7, %k7
+; AVX512F-32-NEXT: kxorq %k7, %k2, %k7
; AVX512F-32-NEXT: kshiftrq $39, %k7, %k2
; AVX512F-32-NEXT: kxorq %k6, %k2, %k6
; AVX512F-32-NEXT: kmovd %edx, %k2
@@ -2863,7 +2863,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $24, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
; AVX512F-32-NEXT: kshiftrq $40, %k6, %k7
; AVX512F-32-NEXT: kxorq %k1, %k7, %k7
; AVX512F-32-NEXT: kmovd %ecx, %k1
@@ -2874,28 +2874,28 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $12, %ecx
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $23, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $41, %k6, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k0
; AVX512F-32-NEXT: kmovd %ecx, %k1
; AVX512F-32-NEXT: shrl $14, %edi
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $22, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k6, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k6, %k0
; AVX512F-32-NEXT: kshiftrq $42, %k0, %k6
; AVX512F-32-NEXT: kxorq %k3, %k6, %k3
; AVX512F-32-NEXT: kmovd %edi, %k7
; AVX512F-32-NEXT: shrl $15, %esi
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $21, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k0, %k3, %k0
+; AVX512F-32-NEXT: kxorq %k3, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $43, %k0, %k3
; AVX512F-32-NEXT: kxorq %k4, %k3, %k3
; AVX512F-32-NEXT: kmovd %esi, %k6
; AVX512F-32-NEXT: shrb $3, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k3, %k3
; AVX512F-32-NEXT: kshiftrq $20, %k3, %k3
-; AVX512F-32-NEXT: kxorq %k0, %k3, %k3
+; AVX512F-32-NEXT: kxorq %k3, %k0, %k3
; AVX512F-32-NEXT: kshiftrq $44, %k3, %k0
; AVX512F-32-NEXT: kxorq %k1, %k0, %k1
; AVX512F-32-NEXT: kmovd %edx, %k0
@@ -2903,7 +2903,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $4, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $19, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k3, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k3, %k1
; AVX512F-32-NEXT: kshiftrq $45, %k1, %k3
; AVX512F-32-NEXT: kxorq %k5, %k3, %k4
; AVX512F-32-NEXT: kmovd %ecx, %k3
@@ -2912,7 +2912,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: andb $1, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k4, %k4
; AVX512F-32-NEXT: kshiftrq $18, %k4, %k4
-; AVX512F-32-NEXT: kxorq %k1, %k4, %k1
+; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $46, %k1, %k4
; AVX512F-32-NEXT: kxorq %k7, %k4, %k5
; AVX512F-32-NEXT: kmovd %ecx, %k4
@@ -2920,12 +2920,12 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $6, %cl
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $17, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k1, %k5, %k1
+; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $47, %k1, %k5
; AVX512F-32-NEXT: kxorq %k6, %k5, %k5
; AVX512F-32-NEXT: kshiftlq $63, %k5, %k5
; AVX512F-32-NEXT: kshiftrq $16, %k5, %k5
-; AVX512F-32-NEXT: kxorq %k1, %k5, %k1
+; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $48, %k1, %k5
; AVX512F-32-NEXT: kmovd %eax, %k6
; AVX512F-32-NEXT: kxorq %k6, %k5, %k6
@@ -2936,14 +2936,14 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $7, %al
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $15, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $49, %k1, %k6
; AVX512F-32-NEXT: kxorq %k2, %k6, %k6
; AVX512F-32-NEXT: kmovd %eax, %k2
; AVX512F-32-NEXT: movl %edx, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $14, %k6, %k6
-; AVX512F-32-NEXT: kxorq %k1, %k6, %k6
+; AVX512F-32-NEXT: kxorq %k6, %k1, %k6
; AVX512F-32-NEXT: kshiftrq $50, %k6, %k1
; AVX512F-32-NEXT: kmovq {{[0-9]+}}(%esp), %k7 # 8-byte Reload
; AVX512F-32-NEXT: kxorq %k7, %k1, %k7
@@ -2954,7 +2954,7 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $13, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $51, %k6, %k7
; AVX512F-32-NEXT: kxorq %k0, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k0
@@ -2962,14 +2962,14 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrb $2, %dl
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $12, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $52, %k6, %k7
; AVX512F-32-NEXT: kxorq %k3, %k7, %k7
; AVX512F-32-NEXT: kmovd %edx, %k3
; AVX512F-32-NEXT: shrb $3, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $11, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $53, %k6, %k7
; AVX512F-32-NEXT: kxorq %k4, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k4
@@ -2978,40 +2978,40 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: andb $1, %al
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $10, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $54, %k6, %k7
; AVX512F-32-NEXT: kxorq %k5, %k7, %k7
; AVX512F-32-NEXT: kmovd %eax, %k5
; AVX512F-32-NEXT: kshiftlq $63, %k7, %k7
; AVX512F-32-NEXT: kshiftrq $9, %k7, %k7
-; AVX512F-32-NEXT: kxorq %k6, %k7, %k6
+; AVX512F-32-NEXT: kxorq %k7, %k6, %k6
; AVX512F-32-NEXT: kshiftrq $55, %k6, %k7
; AVX512F-32-NEXT: kxorq %k2, %k7, %k2
; AVX512F-32-NEXT: kshiftlq $63, %k2, %k2
; AVX512F-32-NEXT: kshiftrq $8, %k2, %k2
-; AVX512F-32-NEXT: kxorq %k6, %k2, %k2
+; AVX512F-32-NEXT: kxorq %k2, %k6, %k2
; AVX512F-32-NEXT: kshiftrq $56, %k2, %k6
; AVX512F-32-NEXT: kxorq %k1, %k6, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $7, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
+; AVX512F-32-NEXT: kxorq %k1, %k2, %k1
; AVX512F-32-NEXT: kshiftrq $57, %k1, %k2
; AVX512F-32-NEXT: kxorq %k0, %k2, %k0
; AVX512F-32-NEXT: kshiftlq $63, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $6, %k0, %k0
-; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
+; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
; AVX512F-32-NEXT: kshiftrq $58, %k0, %k1
; AVX512F-32-NEXT: kxorq %k3, %k1, %k1
; AVX512F-32-NEXT: movl %ebx, %eax
; AVX512F-32-NEXT: shrl $28, %eax
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $5, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $59, %k0, %k1
; AVX512F-32-NEXT: kxorq %k4, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $4, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $60, %k0, %k1
; AVX512F-32-NEXT: kmovd %eax, %k2
; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
@@ -3021,18 +3021,18 @@ define i64 @test_mask_x86_avx512_ucmp_b_
; AVX512F-32-NEXT: shrl $30, %ecx
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $3, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $61, %k0, %k1
; AVX512F-32-NEXT: kxorq %k5, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $2, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $62, %k0, %k1
; AVX512F-32-NEXT: kmovd %ecx, %k2
; AVX512F-32-NEXT: kxorq %k2, %k1, %k1
; AVX512F-32-NEXT: kshiftlq $63, %k1, %k1
; AVX512F-32-NEXT: kshiftrq $1, %k1, %k1
-; AVX512F-32-NEXT: kxorq %k0, %k1, %k0
+; AVX512F-32-NEXT: kxorq %k1, %k0, %k0
; AVX512F-32-NEXT: kshiftlq $1, %k0, %k0
; AVX512F-32-NEXT: kshiftrq $1, %k0, %k0
; AVX512F-32-NEXT: kmovd %eax, %k1
More information about the llvm-commits
mailing list