[llvm] r325527 - [X86] Stop swapping the operands of AVX512 setge.
Craig Topper via llvm-commits
llvm-commits at lists.llvm.org
Mon Feb 19 11:23:35 PST 2018
Author: ctopper
Date: Mon Feb 19 11:23:35 2018
New Revision: 325527
URL: http://llvm.org/viewvc/llvm-project?rev=325527&view=rev
Log:
[X86] Stop swapping the operands of AVX512 setge.
We swapped the operands and used setle, but I don't see any reason to do that. I think this is a holdover from SSE where we swap and then invert to use pcmpgt. But with AVX512 we don't want an invert so we won't use pcmpgt. So there's no need to swap.
Modified:
llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
llvm/trunk/test/CodeGen/X86/avx512-intrinsics-upgrade.ll
llvm/trunk/test/CodeGen/X86/avx512-vec-cmp.ll
llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll
llvm/trunk/test/CodeGen/X86/avx512bw-vec-cmp.ll
llvm/trunk/test/CodeGen/X86/avx512bwvl-intrinsics-upgrade.ll
llvm/trunk/test/CodeGen/X86/avx512bwvl-vec-cmp.ll
llvm/trunk/test/CodeGen/X86/avx512vl-intrinsics-upgrade.ll
llvm/trunk/test/CodeGen/X86/avx512vl-vec-cmp.ll
llvm/trunk/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll
Modified: llvm/trunk/lib/Target/X86/X86ISelLowering.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Target/X86/X86ISelLowering.cpp?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/lib/Target/X86/X86ISelLowering.cpp (original)
+++ llvm/trunk/lib/Target/X86/X86ISelLowering.cpp Mon Feb 19 11:23:35 2018
@@ -17848,8 +17848,8 @@ static SDValue LowerIntVSETCC_AVX512(SDV
case ISD::SETLT: Swap = true; LLVM_FALLTHROUGH;
case ISD::SETUGT:
case ISD::SETGT: SSECC = 6; break;
- case ISD::SETUGE: SSECC = 5; break;
- case ISD::SETGE: Swap = true; LLVM_FALLTHROUGH;
+ case ISD::SETUGE:
+ case ISD::SETGE: SSECC = 5; break;
case ISD::SETULE:
case ISD::SETLE: SSECC = 2; break;
}
Modified: llvm/trunk/test/CodeGen/X86/avx512-intrinsics-upgrade.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-intrinsics-upgrade.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-intrinsics-upgrade.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-intrinsics-upgrade.ll Mon Feb 19 11:23:35 2018
@@ -3149,7 +3149,7 @@ define <8 x i16> @test_cmp_d_512(<16 x i
; CHECK-NEXT: vpcmpgtd %zmm0, %zmm1, %k1
; CHECK-NEXT: vpcmpled %zmm1, %zmm0, %k2
; CHECK-NEXT: vpcmpneqd %zmm1, %zmm0, %k3
-; CHECK-NEXT: vpcmpled %zmm0, %zmm1, %k4
+; CHECK-NEXT: vpcmpnltd %zmm1, %zmm0, %k4
; CHECK-NEXT: vpcmpgtd %zmm1, %zmm0, %k5
; CHECK-NEXT: kmovw %k0, %eax
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
@@ -3194,7 +3194,7 @@ define <8 x i16> @test_mask_cmp_d_512(<1
; CHECK-NEXT: vpcmpgtd %zmm0, %zmm1, %k2 {%k1}
; CHECK-NEXT: vpcmpled %zmm1, %zmm0, %k3 {%k1}
; CHECK-NEXT: vpcmpneqd %zmm1, %zmm0, %k4 {%k1}
-; CHECK-NEXT: vpcmpled %zmm0, %zmm1, %k5 {%k1}
+; CHECK-NEXT: vpcmpnltd %zmm1, %zmm0, %k5 {%k1}
; CHECK-NEXT: vpcmpgtd %zmm1, %zmm0, %k1 {%k1}
; CHECK-NEXT: kmovw %k0, %eax
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
@@ -3329,7 +3329,7 @@ define <8 x i8> @test_cmp_q_512(<8 x i64
; CHECK-NEXT: vpcmpgtq %zmm0, %zmm1, %k1
; CHECK-NEXT: vpcmpleq %zmm1, %zmm0, %k2
; CHECK-NEXT: vpcmpneqq %zmm1, %zmm0, %k3
-; CHECK-NEXT: vpcmpleq %zmm0, %zmm1, %k4
+; CHECK-NEXT: vpcmpnltq %zmm1, %zmm0, %k4
; CHECK-NEXT: vpcmpgtq %zmm1, %zmm0, %k5
; CHECK-NEXT: kmovw %k0, %eax
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
@@ -3374,7 +3374,7 @@ define <8 x i8> @test_mask_cmp_q_512(<8
; CHECK-NEXT: vpcmpgtq %zmm0, %zmm1, %k2 {%k1}
; CHECK-NEXT: vpcmpleq %zmm1, %zmm0, %k3 {%k1}
; CHECK-NEXT: vpcmpneqq %zmm1, %zmm0, %k4 {%k1}
-; CHECK-NEXT: vpcmpleq %zmm0, %zmm1, %k5 {%k1}
+; CHECK-NEXT: vpcmpnltq %zmm1, %zmm0, %k5 {%k1}
; CHECK-NEXT: vpcmpgtq %zmm1, %zmm0, %k1 {%k1}
; CHECK-NEXT: kmovw %k0, %eax
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0
Modified: llvm/trunk/test/CodeGen/X86/avx512-vec-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512-vec-cmp.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512-vec-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512-vec-cmp.ll Mon Feb 19 11:23:35 2018
@@ -345,7 +345,7 @@ define <8 x i64> @test15(<8 x i64>%a, <8
define <16 x i32> @test16(<16 x i32> %x, <16 x i32> %y, <16 x i32> %x1) nounwind {
; CHECK-LABEL: test16:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmpled %zmm0, %zmm1, %k1 ## encoding: [0x62,0xf3,0x75,0x48,0x1f,0xc8,0x02]
+; CHECK-NEXT: vpcmpnltd %zmm1, %zmm0, %k1 ## encoding: [0x62,0xf3,0x7d,0x48,0x1f,0xc9,0x05]
; CHECK-NEXT: vpblendmd %zmm2, %zmm1, %zmm0 {%k1} ## encoding: [0x62,0xf2,0x75,0x49,0x64,0xc2]
; CHECK-NEXT: retq ## encoding: [0xc3]
%mask = icmp sge <16 x i32> %x, %y
@@ -407,7 +407,7 @@ define <8 x i64> @test21(<8 x i64> %x, <
; CHECK-LABEL: test21:
; CHECK: ## %bb.0:
; CHECK-NEXT: vpcmpleq %zmm1, %zmm0, %k1 ## encoding: [0x62,0xf3,0xfd,0x48,0x1f,0xc9,0x02]
-; CHECK-NEXT: vpcmpleq %zmm2, %zmm3, %k1 {%k1} ## encoding: [0x62,0xf3,0xe5,0x49,0x1f,0xca,0x02]
+; CHECK-NEXT: vpcmpnltq %zmm3, %zmm2, %k1 {%k1} ## encoding: [0x62,0xf3,0xed,0x49,0x1f,0xcb,0x05]
; CHECK-NEXT: vpblendmq %zmm0, %zmm2, %zmm0 {%k1} ## encoding: [0x62,0xf2,0xed,0x49,0x64,0xc0]
; CHECK-NEXT: retq ## encoding: [0xc3]
%mask1 = icmp sge <8 x i64> %x1, %y1
@@ -435,7 +435,7 @@ define <8 x i64> @test22(<8 x i64> %x, <
define <16 x i32> @test23(<16 x i32> %x, <16 x i32>* %y.ptr, <16 x i32> %x1, <16 x i32> %y1) nounwind {
; CHECK-LABEL: test23:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmpled %zmm1, %zmm2, %k1 ## encoding: [0x62,0xf3,0x6d,0x48,0x1f,0xc9,0x02]
+; CHECK-NEXT: vpcmpnltd %zmm2, %zmm1, %k1 ## encoding: [0x62,0xf3,0x75,0x48,0x1f,0xca,0x05]
; CHECK-NEXT: vpcmpleud (%rdi), %zmm0, %k1 {%k1} ## encoding: [0x62,0xf3,0x7d,0x49,0x1e,0x0f,0x02]
; CHECK-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1} ## encoding: [0x62,0xf2,0x75,0x49,0x64,0xc0]
; CHECK-NEXT: retq ## encoding: [0xc3]
@@ -478,7 +478,7 @@ define <16 x i32> @test25(<16 x i32> %x,
define <16 x i32> @test26(<16 x i32> %x, i32* %yb.ptr, <16 x i32> %x1, <16 x i32> %y1) nounwind {
; CHECK-LABEL: test26:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmpled %zmm1, %zmm2, %k1 ## encoding: [0x62,0xf3,0x6d,0x48,0x1f,0xc9,0x02]
+; CHECK-NEXT: vpcmpnltd %zmm2, %zmm1, %k1 ## encoding: [0x62,0xf3,0x75,0x48,0x1f,0xca,0x05]
; CHECK-NEXT: vpcmpgtd (%rdi){1to16}, %zmm0, %k1 {%k1} ## encoding: [0x62,0xf1,0x7d,0x59,0x66,0x0f]
; CHECK-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1} ## encoding: [0x62,0xf2,0x75,0x49,0x64,0xc0]
; CHECK-NEXT: retq ## encoding: [0xc3]
@@ -495,7 +495,7 @@ define <16 x i32> @test26(<16 x i32> %x,
define <8 x i64> @test27(<8 x i64> %x, i64* %yb.ptr, <8 x i64> %x1, <8 x i64> %y1) nounwind {
; CHECK-LABEL: test27:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmpleq %zmm1, %zmm2, %k1 ## encoding: [0x62,0xf3,0xed,0x48,0x1f,0xc9,0x02]
+; CHECK-NEXT: vpcmpnltq %zmm2, %zmm1, %k1 ## encoding: [0x62,0xf3,0xf5,0x48,0x1f,0xca,0x05]
; CHECK-NEXT: vpcmpleq (%rdi){1to8}, %zmm0, %k1 {%k1} ## encoding: [0x62,0xf3,0xfd,0x59,0x1f,0x0f,0x02]
; CHECK-NEXT: vpblendmq %zmm0, %zmm1, %zmm0 {%k1} ## encoding: [0x62,0xf2,0xf5,0x49,0x64,0xc0]
; CHECK-NEXT: retq ## encoding: [0xc3]
Modified: llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bw-intrinsics-upgrade.ll Mon Feb 19 11:23:35 2018
@@ -1688,7 +1688,7 @@ define i64 @test_cmp_b_512(<64 x i8> %a0
; AVX512BW-NEXT: vpcmpneqb %zmm1, %zmm0, %k0
; AVX512BW-NEXT: kmovq %k0, %rcx
; AVX512BW-NEXT: addq %rax, %rcx
-; AVX512BW-NEXT: vpcmpleb %zmm0, %zmm1, %k0
+; AVX512BW-NEXT: vpcmpnltb %zmm1, %zmm0, %k0
; AVX512BW-NEXT: kmovq %k0, %rax
; AVX512BW-NEXT: addq %rcx, %rax
; AVX512BW-NEXT: vpcmpgtb %zmm1, %zmm0, %k0
@@ -1727,7 +1727,7 @@ define i64 @test_cmp_b_512(<64 x i8> %a0
; AVX512F-32-NEXT: kmovd %k0, %esi
; AVX512F-32-NEXT: addl %ecx, %esi
; AVX512F-32-NEXT: adcl %eax, %edx
-; AVX512F-32-NEXT: vpcmpleb %zmm0, %zmm1, %k0
+; AVX512F-32-NEXT: vpcmpnltb %zmm1, %zmm0, %k0
; AVX512F-32-NEXT: kshiftrq $32, %k0, %k1
; AVX512F-32-NEXT: kmovd %k1, %ecx
; AVX512F-32-NEXT: kmovd %k0, %edi
@@ -1778,7 +1778,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512BW-NEXT: vpcmpneqb %zmm1, %zmm0, %k0 {%k1}
; AVX512BW-NEXT: kmovq %k0, %rcx
; AVX512BW-NEXT: addq %rax, %rcx
-; AVX512BW-NEXT: vpcmpleb %zmm0, %zmm1, %k0 {%k1}
+; AVX512BW-NEXT: vpcmpnltb %zmm1, %zmm0, %k0 {%k1}
; AVX512BW-NEXT: kmovq %k0, %rdx
; AVX512BW-NEXT: addq %rcx, %rdx
; AVX512BW-NEXT: vpcmpgtb %zmm1, %zmm0, %k0 {%k1}
@@ -1836,7 +1836,7 @@ define i64 @test_mask_cmp_b_512(<64 x i8
; AVX512F-32-NEXT: kmovd %k2, %ebx
; AVX512F-32-NEXT: addl %edx, %ebx
; AVX512F-32-NEXT: adcl %eax, %edi
-; AVX512F-32-NEXT: vpcmpleb %zmm0, %zmm1, %k2
+; AVX512F-32-NEXT: vpcmpnltb %zmm1, %zmm0, %k2
; AVX512F-32-NEXT: kshiftrq $32, %k2, %k3
; AVX512F-32-NEXT: kandd %k1, %k3, %k3
; AVX512F-32-NEXT: kmovd %k3, %ebp
@@ -2100,7 +2100,7 @@ define i32 @test_cmp_w_512(<32 x i16> %a
; AVX512BW-NEXT: vpcmpneqw %zmm1, %zmm0, %k0
; AVX512BW-NEXT: kmovd %k0, %ecx
; AVX512BW-NEXT: addl %eax, %ecx
-; AVX512BW-NEXT: vpcmplew %zmm0, %zmm1, %k0
+; AVX512BW-NEXT: vpcmpnltw %zmm1, %zmm0, %k0
; AVX512BW-NEXT: kmovd %k0, %eax
; AVX512BW-NEXT: addl %ecx, %eax
; AVX512BW-NEXT: vpcmpgtw %zmm1, %zmm0, %k0
@@ -2122,7 +2122,7 @@ define i32 @test_cmp_w_512(<32 x i16> %a
; AVX512F-32-NEXT: vpcmpneqw %zmm1, %zmm0, %k0
; AVX512F-32-NEXT: kmovd %k0, %ecx
; AVX512F-32-NEXT: addl %eax, %ecx
-; AVX512F-32-NEXT: vpcmplew %zmm0, %zmm1, %k0
+; AVX512F-32-NEXT: vpcmpnltw %zmm1, %zmm0, %k0
; AVX512F-32-NEXT: kmovd %k0, %eax
; AVX512F-32-NEXT: addl %ecx, %eax
; AVX512F-32-NEXT: vpcmpgtw %zmm1, %zmm0, %k0
@@ -2163,7 +2163,7 @@ define i32 @test_mask_cmp_w_512(<32 x i1
; AVX512BW-NEXT: vpcmpneqw %zmm1, %zmm0, %k0 {%k1}
; AVX512BW-NEXT: kmovd %k0, %ecx
; AVX512BW-NEXT: addl %eax, %ecx
-; AVX512BW-NEXT: vpcmplew %zmm0, %zmm1, %k0 {%k1}
+; AVX512BW-NEXT: vpcmpnltw %zmm1, %zmm0, %k0 {%k1}
; AVX512BW-NEXT: kmovd %k0, %edx
; AVX512BW-NEXT: addl %ecx, %edx
; AVX512BW-NEXT: vpcmpgtw %zmm1, %zmm0, %k0 {%k1}
@@ -2191,7 +2191,7 @@ define i32 @test_mask_cmp_w_512(<32 x i1
; AVX512F-32-NEXT: vpcmpneqw %zmm1, %zmm0, %k0 {%k1}
; AVX512F-32-NEXT: kmovd %k0, %edx
; AVX512F-32-NEXT: addl %eax, %edx
-; AVX512F-32-NEXT: vpcmplew %zmm0, %zmm1, %k0 {%k1}
+; AVX512F-32-NEXT: vpcmpnltw %zmm1, %zmm0, %k0 {%k1}
; AVX512F-32-NEXT: kmovd %k0, %esi
; AVX512F-32-NEXT: addl %edx, %esi
; AVX512F-32-NEXT: vpcmpgtw %zmm1, %zmm0, %k0 {%k1}
Modified: llvm/trunk/test/CodeGen/X86/avx512bw-vec-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bw-vec-cmp.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bw-vec-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bw-vec-cmp.ll Mon Feb 19 11:23:35 2018
@@ -26,7 +26,7 @@ define <64 x i8> @test2(<64 x i8> %x, <6
define <32 x i16> @test3(<32 x i16> %x, <32 x i16> %y, <32 x i16> %x1) nounwind {
; CHECK-LABEL: test3:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %zmm0, %zmm1, %k1
+; CHECK-NEXT: vpcmpnltw %zmm1, %zmm0, %k1
; CHECK-NEXT: vpblendmw %zmm2, %zmm1, %zmm0 {%k1}
; CHECK-NEXT: retq
%mask = icmp sge <32 x i16> %x, %y
@@ -111,7 +111,7 @@ define <64 x i8> @test10(<64 x i8> %x, <
; CHECK-LABEL: test10:
; CHECK: ## %bb.0:
; CHECK-NEXT: vpcmpleb %zmm1, %zmm0, %k1
-; CHECK-NEXT: vpcmpleb %zmm2, %zmm3, %k1 {%k1}
+; CHECK-NEXT: vpcmpnltb %zmm3, %zmm2, %k1 {%k1}
; CHECK-NEXT: vpblendmb %zmm0, %zmm2, %zmm0 {%k1}
; CHECK-NEXT: retq
%mask1 = icmp sge <64 x i8> %x1, %y1
@@ -139,7 +139,7 @@ define <64 x i8> @test11(<64 x i8> %x, <
define <32 x i16> @test12(<32 x i16> %x, <32 x i16>* %y.ptr, <32 x i16> %x1, <32 x i16> %y1) nounwind {
; CHECK-LABEL: test12:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %zmm1, %zmm2, %k1
+; CHECK-NEXT: vpcmpnltw %zmm2, %zmm1, %k1
; CHECK-NEXT: vpcmpleuw (%rdi), %zmm0, %k1 {%k1}
; CHECK-NEXT: vpblendmw %zmm0, %zmm1, %zmm0 {%k1}
; CHECK-NEXT: retq
Modified: llvm/trunk/test/CodeGen/X86/avx512bwvl-intrinsics-upgrade.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bwvl-intrinsics-upgrade.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bwvl-intrinsics-upgrade.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bwvl-intrinsics-upgrade.ll Mon Feb 19 11:23:35 2018
@@ -2797,7 +2797,7 @@ define <8 x i32> @test_cmp_b_256(<32 x i
; CHECK-NEXT: kmovd %k0, %edx ## encoding: [0xc5,0xfb,0x93,0xd0]
; CHECK-NEXT: vpcmpneqb %ymm1, %ymm0, %k0 ## encoding: [0x62,0xf3,0x7d,0x28,0x3f,0xc1,0x04]
; CHECK-NEXT: kmovd %k0, %esi ## encoding: [0xc5,0xfb,0x93,0xf0]
-; CHECK-NEXT: vpcmpleb %ymm0, %ymm1, %k0 ## encoding: [0x62,0xf3,0x75,0x28,0x3f,0xc0,0x02]
+; CHECK-NEXT: vpcmpnltb %ymm1, %ymm0, %k0 ## encoding: [0x62,0xf3,0x7d,0x28,0x3f,0xc1,0x05]
; CHECK-NEXT: kmovd %k0, %edi ## encoding: [0xc5,0xfb,0x93,0xf8]
; CHECK-NEXT: vpcmpgtb %ymm1, %ymm0, %k0 ## encoding: [0x62,0xf1,0x7d,0x28,0x64,0xc1]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
@@ -2847,7 +2847,7 @@ define <8 x i32> @test_mask_cmp_b_256(<3
; CHECK-NEXT: kmovd %k0, %r9d ## encoding: [0xc5,0x7b,0x93,0xc8]
; CHECK-NEXT: vpcmpneqb %ymm1, %ymm0, %k0 {%k1} ## encoding: [0x62,0xf3,0x7d,0x29,0x3f,0xc1,0x04]
; CHECK-NEXT: kmovd %k0, %esi ## encoding: [0xc5,0xfb,0x93,0xf0]
-; CHECK-NEXT: vpcmpleb %ymm0, %ymm1, %k0 {%k1} ## encoding: [0x62,0xf3,0x75,0x29,0x3f,0xc0,0x02]
+; CHECK-NEXT: vpcmpnltb %ymm1, %ymm0, %k0 {%k1} ## encoding: [0x62,0xf3,0x7d,0x29,0x3f,0xc1,0x05]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpcmpgtb %ymm1, %ymm0, %k0 {%k1} ## encoding: [0x62,0xf1,0x7d,0x29,0x64,0xc1]
; CHECK-NEXT: kmovd %k0, %edx ## encoding: [0xc5,0xfb,0x93,0xd0]
@@ -2991,7 +2991,7 @@ define <8 x i16> @test_cmp_w_256(<16 x i
; CHECK-NEXT: vpcmpgtw %ymm0, %ymm1, %k1 ## encoding: [0x62,0xf1,0x75,0x28,0x65,0xc8]
; CHECK-NEXT: vpcmplew %ymm1, %ymm0, %k2 ## encoding: [0x62,0xf3,0xfd,0x28,0x3f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqw %ymm1, %ymm0, %k3 ## encoding: [0x62,0xf3,0xfd,0x28,0x3f,0xd9,0x04]
-; CHECK-NEXT: vpcmplew %ymm0, %ymm1, %k4 ## encoding: [0x62,0xf3,0xf5,0x28,0x3f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltw %ymm1, %ymm0, %k4 ## encoding: [0x62,0xf3,0xfd,0x28,0x3f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtw %ymm1, %ymm0, %k5 ## encoding: [0x62,0xf1,0x7d,0x28,0x65,0xe9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -3038,7 +3038,7 @@ define <8 x i16> @test_mask_cmp_w_256(<1
; CHECK-NEXT: vpcmpgtw %ymm0, %ymm1, %k2 {%k1} ## encoding: [0x62,0xf1,0x75,0x29,0x65,0xd0]
; CHECK-NEXT: vpcmplew %ymm1, %ymm0, %k3 {%k1} ## encoding: [0x62,0xf3,0xfd,0x29,0x3f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqw %ymm1, %ymm0, %k4 {%k1} ## encoding: [0x62,0xf3,0xfd,0x29,0x3f,0xe1,0x04]
-; CHECK-NEXT: vpcmplew %ymm0, %ymm1, %k5 {%k1} ## encoding: [0x62,0xf3,0xf5,0x29,0x3f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltw %ymm1, %ymm0, %k5 {%k1} ## encoding: [0x62,0xf3,0xfd,0x29,0x3f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtw %ymm1, %ymm0, %k1 {%k1} ## encoding: [0x62,0xf1,0x7d,0x29,0x65,0xc9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -3177,7 +3177,7 @@ define <8 x i16> @test_cmp_b_128(<16 x i
; CHECK-NEXT: vpcmpgtb %xmm0, %xmm1, %k1 ## encoding: [0x62,0xf1,0x75,0x08,0x64,0xc8]
; CHECK-NEXT: vpcmpleb %xmm1, %xmm0, %k2 ## encoding: [0x62,0xf3,0x7d,0x08,0x3f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqb %xmm1, %xmm0, %k3 ## encoding: [0x62,0xf3,0x7d,0x08,0x3f,0xd9,0x04]
-; CHECK-NEXT: vpcmpleb %xmm0, %xmm1, %k4 ## encoding: [0x62,0xf3,0x75,0x08,0x3f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltb %xmm1, %xmm0, %k4 ## encoding: [0x62,0xf3,0x7d,0x08,0x3f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtb %xmm1, %xmm0, %k5 ## encoding: [0x62,0xf1,0x7d,0x08,0x64,0xe9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -3223,7 +3223,7 @@ define <8 x i16> @test_mask_cmp_b_128(<1
; CHECK-NEXT: vpcmpgtb %xmm0, %xmm1, %k2 {%k1} ## encoding: [0x62,0xf1,0x75,0x09,0x64,0xd0]
; CHECK-NEXT: vpcmpleb %xmm1, %xmm0, %k3 {%k1} ## encoding: [0x62,0xf3,0x7d,0x09,0x3f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqb %xmm1, %xmm0, %k4 {%k1} ## encoding: [0x62,0xf3,0x7d,0x09,0x3f,0xe1,0x04]
-; CHECK-NEXT: vpcmpleb %xmm0, %xmm1, %k5 {%k1} ## encoding: [0x62,0xf3,0x75,0x09,0x3f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltb %xmm1, %xmm0, %k5 {%k1} ## encoding: [0x62,0xf3,0x7d,0x09,0x3f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtb %xmm1, %xmm0, %k1 {%k1} ## encoding: [0x62,0xf1,0x7d,0x09,0x64,0xc9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -3359,7 +3359,7 @@ define <8 x i8> @test_cmp_w_128(<8 x i16
; CHECK-NEXT: vpcmpgtw %xmm0, %xmm1, %k1 ## encoding: [0x62,0xf1,0x75,0x08,0x65,0xc8]
; CHECK-NEXT: vpcmplew %xmm1, %xmm0, %k2 ## encoding: [0x62,0xf3,0xfd,0x08,0x3f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqw %xmm1, %xmm0, %k3 ## encoding: [0x62,0xf3,0xfd,0x08,0x3f,0xd9,0x04]
-; CHECK-NEXT: vpcmplew %xmm0, %xmm1, %k4 ## encoding: [0x62,0xf3,0xf5,0x08,0x3f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltw %xmm1, %xmm0, %k4 ## encoding: [0x62,0xf3,0xfd,0x08,0x3f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtw %xmm1, %xmm0, %k5 ## encoding: [0x62,0xf1,0x7d,0x08,0x65,0xe9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -3404,7 +3404,7 @@ define <8 x i8> @test_mask_cmp_w_128(<8
; CHECK-NEXT: vpcmpgtw %xmm0, %xmm1, %k2 {%k1} ## encoding: [0x62,0xf1,0x75,0x09,0x65,0xd0]
; CHECK-NEXT: vpcmplew %xmm1, %xmm0, %k3 {%k1} ## encoding: [0x62,0xf3,0xfd,0x09,0x3f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqw %xmm1, %xmm0, %k4 {%k1} ## encoding: [0x62,0xf3,0xfd,0x09,0x3f,0xe1,0x04]
-; CHECK-NEXT: vpcmplew %xmm0, %xmm1, %k5 {%k1} ## encoding: [0x62,0xf3,0xf5,0x09,0x3f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltw %xmm1, %xmm0, %k5 {%k1} ## encoding: [0x62,0xf3,0xfd,0x09,0x3f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtw %xmm1, %xmm0, %k1 {%k1} ## encoding: [0x62,0xf1,0x7d,0x09,0x65,0xc9]
; CHECK-NEXT: kmovd %k0, %eax ## encoding: [0xc5,0xfb,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
Modified: llvm/trunk/test/CodeGen/X86/avx512bwvl-vec-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512bwvl-vec-cmp.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512bwvl-vec-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512bwvl-vec-cmp.ll Mon Feb 19 11:23:35 2018
@@ -26,7 +26,7 @@ define <32 x i8> @test256_2(<32 x i8> %x
define <16 x i16> @test256_3(<16 x i16> %x, <16 x i16> %y, <16 x i16> %x1) nounwind {
; CHECK-LABEL: test256_3:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %ymm0, %ymm1, %k1
+; CHECK-NEXT: vpcmpnltw %ymm1, %ymm0, %k1
; CHECK-NEXT: vpblendmw %ymm2, %ymm1, %ymm0 {%k1}
; CHECK-NEXT: retq
%mask = icmp sge <16 x i16> %x, %y
@@ -111,7 +111,7 @@ define <32 x i8> @test256_10(<32 x i8> %
; CHECK-LABEL: test256_10:
; CHECK: ## %bb.0:
; CHECK-NEXT: vpcmpleb %ymm1, %ymm0, %k1
-; CHECK-NEXT: vpcmpleb %ymm2, %ymm3, %k1 {%k1}
+; CHECK-NEXT: vpcmpnltb %ymm3, %ymm2, %k1 {%k1}
; CHECK-NEXT: vpblendmb %ymm0, %ymm2, %ymm0 {%k1}
; CHECK-NEXT: retq
%mask1 = icmp sge <32 x i8> %x1, %y1
@@ -139,7 +139,7 @@ define <32 x i8> @test256_11(<32 x i8> %
define <16 x i16> @test256_12(<16 x i16> %x, <16 x i16>* %y.ptr, <16 x i16> %x1, <16 x i16> %y1) nounwind {
; CHECK-LABEL: test256_12:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %ymm1, %ymm2, %k1
+; CHECK-NEXT: vpcmpnltw %ymm2, %ymm1, %k1
; CHECK-NEXT: vpcmpleuw (%rdi), %ymm0, %k1 {%k1}
; CHECK-NEXT: vpblendmw %ymm0, %ymm1, %ymm0 {%k1}
; CHECK-NEXT: retq
@@ -176,7 +176,7 @@ define <16 x i8> @test128_2(<16 x i8> %x
define <8 x i16> @test128_3(<8 x i16> %x, <8 x i16> %y, <8 x i16> %x1) nounwind {
; CHECK-LABEL: test128_3:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %xmm0, %xmm1, %k1
+; CHECK-NEXT: vpcmpnltw %xmm1, %xmm0, %k1
; CHECK-NEXT: vpblendmw %xmm2, %xmm1, %xmm0 {%k1}
; CHECK-NEXT: retq
%mask = icmp sge <8 x i16> %x, %y
@@ -261,7 +261,7 @@ define <16 x i8> @test128_10(<16 x i8> %
; CHECK-LABEL: test128_10:
; CHECK: ## %bb.0:
; CHECK-NEXT: vpcmpleb %xmm1, %xmm0, %k1
-; CHECK-NEXT: vpcmpleb %xmm2, %xmm3, %k1 {%k1}
+; CHECK-NEXT: vpcmpnltb %xmm3, %xmm2, %k1 {%k1}
; CHECK-NEXT: vpblendmb %xmm0, %xmm2, %xmm0 {%k1}
; CHECK-NEXT: retq
%mask1 = icmp sge <16 x i8> %x1, %y1
@@ -289,7 +289,7 @@ define <16 x i8> @test128_11(<16 x i8> %
define <8 x i16> @test128_12(<8 x i16> %x, <8 x i16>* %y.ptr, <8 x i16> %x1, <8 x i16> %y1) nounwind {
; CHECK-LABEL: test128_12:
; CHECK: ## %bb.0:
-; CHECK-NEXT: vpcmplew %xmm1, %xmm2, %k1
+; CHECK-NEXT: vpcmpnltw %xmm2, %xmm1, %k1
; CHECK-NEXT: vpcmpleuw (%rdi), %xmm0, %k1 {%k1}
; CHECK-NEXT: vpblendmw %xmm0, %xmm1, %xmm0 {%k1}
; CHECK-NEXT: retq
Modified: llvm/trunk/test/CodeGen/X86/avx512vl-intrinsics-upgrade.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512vl-intrinsics-upgrade.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512vl-intrinsics-upgrade.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512vl-intrinsics-upgrade.ll Mon Feb 19 11:23:35 2018
@@ -5109,7 +5109,7 @@ define <8 x i8> @test_cmp_d_256(<8 x i32
; CHECK-NEXT: vpcmpgtd %ymm0, %ymm1, %k1 ## encoding: [0x62,0xf1,0x75,0x28,0x66,0xc8]
; CHECK-NEXT: vpcmpled %ymm1, %ymm0, %k2 ## encoding: [0x62,0xf3,0x7d,0x28,0x1f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqd %ymm1, %ymm0, %k3 ## encoding: [0x62,0xf3,0x7d,0x28,0x1f,0xd9,0x04]
-; CHECK-NEXT: vpcmpled %ymm0, %ymm1, %k4 ## encoding: [0x62,0xf3,0x75,0x28,0x1f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltd %ymm1, %ymm0, %k4 ## encoding: [0x62,0xf3,0x7d,0x28,0x1f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtd %ymm1, %ymm0, %k5 ## encoding: [0x62,0xf1,0x7d,0x28,0x66,0xe9]
; CHECK-NEXT: kmovw %k0, %eax ## encoding: [0xc5,0xf8,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -5154,7 +5154,7 @@ define <8 x i8> @test_mask_cmp_d_256(<8
; CHECK-NEXT: vpcmpgtd %ymm0, %ymm1, %k2 {%k1} ## encoding: [0x62,0xf1,0x75,0x29,0x66,0xd0]
; CHECK-NEXT: vpcmpled %ymm1, %ymm0, %k3 {%k1} ## encoding: [0x62,0xf3,0x7d,0x29,0x1f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqd %ymm1, %ymm0, %k4 {%k1} ## encoding: [0x62,0xf3,0x7d,0x29,0x1f,0xe1,0x04]
-; CHECK-NEXT: vpcmpled %ymm0, %ymm1, %k5 {%k1} ## encoding: [0x62,0xf3,0x75,0x29,0x1f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltd %ymm1, %ymm0, %k5 {%k1} ## encoding: [0x62,0xf3,0x7d,0x29,0x1f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtd %ymm1, %ymm0, %k1 {%k1} ## encoding: [0x62,0xf1,0x7d,0x29,0x66,0xc9]
; CHECK-NEXT: kmovw %k0, %eax ## encoding: [0xc5,0xf8,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -5289,7 +5289,7 @@ define <8 x i8> @test_cmp_q_256(<4 x i64
; CHECK-NEXT: vpcmpgtq %ymm0, %ymm1, %k1 ## encoding: [0x62,0xf2,0xf5,0x28,0x37,0xc8]
; CHECK-NEXT: vpcmpleq %ymm1, %ymm0, %k2 ## encoding: [0x62,0xf3,0xfd,0x28,0x1f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqq %ymm1, %ymm0, %k3 ## encoding: [0x62,0xf3,0xfd,0x28,0x1f,0xd9,0x04]
-; CHECK-NEXT: vpcmpleq %ymm0, %ymm1, %k4 ## encoding: [0x62,0xf3,0xf5,0x28,0x1f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltq %ymm1, %ymm0, %k4 ## encoding: [0x62,0xf3,0xfd,0x28,0x1f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtq %ymm1, %ymm0, %k5 ## encoding: [0x62,0xf2,0xfd,0x28,0x37,0xe9]
; CHECK-NEXT: kmovw %k0, %eax ## encoding: [0xc5,0xf8,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -5334,7 +5334,7 @@ define <8 x i8> @test_mask_cmp_q_256(<4
; CHECK-NEXT: vpcmpgtq %ymm0, %ymm1, %k1 {%k2} ## encoding: [0x62,0xf2,0xf5,0x2a,0x37,0xc8]
; CHECK-NEXT: vpcmpleq %ymm1, %ymm0, %k3 {%k2} ## encoding: [0x62,0xf3,0xfd,0x2a,0x1f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqq %ymm1, %ymm0, %k4 {%k2} ## encoding: [0x62,0xf3,0xfd,0x2a,0x1f,0xe1,0x04]
-; CHECK-NEXT: vpcmpleq %ymm0, %ymm1, %k5 {%k2} ## encoding: [0x62,0xf3,0xf5,0x2a,0x1f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltq %ymm1, %ymm0, %k5 {%k2} ## encoding: [0x62,0xf3,0xfd,0x2a,0x1f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtq %ymm1, %ymm0, %k6 {%k2} ## encoding: [0x62,0xf2,0xfd,0x2a,0x37,0xf1]
; CHECK-NEXT: kshiftlw $12, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x32,0xd2,0x0c]
; CHECK-NEXT: kshiftrw $12, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x30,0xd2,0x0c]
@@ -5475,7 +5475,7 @@ define <8 x i8> @test_cmp_d_128(<4 x i32
; CHECK-NEXT: vpcmpgtd %xmm0, %xmm1, %k1 ## encoding: [0x62,0xf1,0x75,0x08,0x66,0xc8]
; CHECK-NEXT: vpcmpled %xmm1, %xmm0, %k2 ## encoding: [0x62,0xf3,0x7d,0x08,0x1f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqd %xmm1, %xmm0, %k3 ## encoding: [0x62,0xf3,0x7d,0x08,0x1f,0xd9,0x04]
-; CHECK-NEXT: vpcmpled %xmm0, %xmm1, %k4 ## encoding: [0x62,0xf3,0x75,0x08,0x1f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltd %xmm1, %xmm0, %k4 ## encoding: [0x62,0xf3,0x7d,0x08,0x1f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtd %xmm1, %xmm0, %k5 ## encoding: [0x62,0xf1,0x7d,0x08,0x66,0xe9]
; CHECK-NEXT: kmovw %k0, %eax ## encoding: [0xc5,0xf8,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -5520,7 +5520,7 @@ define <8 x i8> @test_mask_cmp_d_128(<4
; CHECK-NEXT: vpcmpgtd %xmm0, %xmm1, %k1 {%k2} ## encoding: [0x62,0xf1,0x75,0x0a,0x66,0xc8]
; CHECK-NEXT: vpcmpled %xmm1, %xmm0, %k3 {%k2} ## encoding: [0x62,0xf3,0x7d,0x0a,0x1f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqd %xmm1, %xmm0, %k4 {%k2} ## encoding: [0x62,0xf3,0x7d,0x0a,0x1f,0xe1,0x04]
-; CHECK-NEXT: vpcmpled %xmm0, %xmm1, %k5 {%k2} ## encoding: [0x62,0xf3,0x75,0x0a,0x1f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltd %xmm1, %xmm0, %k5 {%k2} ## encoding: [0x62,0xf3,0x7d,0x0a,0x1f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtd %xmm1, %xmm0, %k6 {%k2} ## encoding: [0x62,0xf1,0x7d,0x0a,0x66,0xf1]
; CHECK-NEXT: kshiftlw $12, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x32,0xd2,0x0c]
; CHECK-NEXT: kshiftrw $12, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x30,0xd2,0x0c]
@@ -5661,7 +5661,7 @@ define <8 x i8> @test_cmp_q_128(<2 x i64
; CHECK-NEXT: vpcmpgtq %xmm0, %xmm1, %k1 ## encoding: [0x62,0xf2,0xf5,0x08,0x37,0xc8]
; CHECK-NEXT: vpcmpleq %xmm1, %xmm0, %k2 ## encoding: [0x62,0xf3,0xfd,0x08,0x1f,0xd1,0x02]
; CHECK-NEXT: vpcmpneqq %xmm1, %xmm0, %k3 ## encoding: [0x62,0xf3,0xfd,0x08,0x1f,0xd9,0x04]
-; CHECK-NEXT: vpcmpleq %xmm0, %xmm1, %k4 ## encoding: [0x62,0xf3,0xf5,0x08,0x1f,0xe0,0x02]
+; CHECK-NEXT: vpcmpnltq %xmm1, %xmm0, %k4 ## encoding: [0x62,0xf3,0xfd,0x08,0x1f,0xe1,0x05]
; CHECK-NEXT: vpcmpgtq %xmm1, %xmm0, %k5 ## encoding: [0x62,0xf2,0xfd,0x08,0x37,0xe9]
; CHECK-NEXT: kmovw %k0, %eax ## encoding: [0xc5,0xf8,0x93,0xc0]
; CHECK-NEXT: vpxor %xmm0, %xmm0, %xmm0 ## EVEX TO VEX Compression encoding: [0xc5,0xf9,0xef,0xc0]
@@ -5706,7 +5706,7 @@ define <8 x i8> @test_mask_cmp_q_128(<2
; CHECK-NEXT: vpcmpgtq %xmm0, %xmm1, %k1 {%k2} ## encoding: [0x62,0xf2,0xf5,0x0a,0x37,0xc8]
; CHECK-NEXT: vpcmpleq %xmm1, %xmm0, %k3 {%k2} ## encoding: [0x62,0xf3,0xfd,0x0a,0x1f,0xd9,0x02]
; CHECK-NEXT: vpcmpneqq %xmm1, %xmm0, %k4 {%k2} ## encoding: [0x62,0xf3,0xfd,0x0a,0x1f,0xe1,0x04]
-; CHECK-NEXT: vpcmpleq %xmm0, %xmm1, %k5 {%k2} ## encoding: [0x62,0xf3,0xf5,0x0a,0x1f,0xe8,0x02]
+; CHECK-NEXT: vpcmpnltq %xmm1, %xmm0, %k5 {%k2} ## encoding: [0x62,0xf3,0xfd,0x0a,0x1f,0xe9,0x05]
; CHECK-NEXT: vpcmpgtq %xmm1, %xmm0, %k6 {%k2} ## encoding: [0x62,0xf2,0xfd,0x0a,0x37,0xf1]
; CHECK-NEXT: kshiftlw $14, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x32,0xd2,0x0e]
; CHECK-NEXT: kshiftrw $14, %k2, %k2 ## encoding: [0xc4,0xe3,0xf9,0x30,0xd2,0x0e]
Modified: llvm/trunk/test/CodeGen/X86/avx512vl-vec-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512vl-vec-cmp.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512vl-vec-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512vl-vec-cmp.ll Mon Feb 19 11:23:35 2018
@@ -46,7 +46,7 @@ define <4 x i64> @test256_2(<4 x i64> %x
define <8 x i32> @test256_3(<8 x i32> %x, <8 x i32> %y, <8 x i32> %x1) nounwind {
; VLX-LABEL: test256_3:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k1
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k1
; VLX-NEXT: vpblendmd %ymm2, %ymm1, %ymm0 {%k1}
; VLX-NEXT: retq
;
@@ -55,7 +55,7 @@ define <8 x i32> @test256_3(<8 x i32> %x
; NoVLX-NEXT: # kill: def $ymm2 killed $ymm2 def $zmm2
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k1
; NoVLX-NEXT: vpblendmd %zmm2, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -207,7 +207,7 @@ define <8 x i32> @test256_7b(<8 x i32> %
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqu (%rdi), %ymm2
-; NoVLX-NEXT: vpcmpled %zmm2, %zmm0, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm0, %zmm2, %k1
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -291,7 +291,7 @@ define <4 x i64> @test256_10(<4 x i64> %
; VLX-LABEL: test256_10:
; VLX: # %bb.0:
; VLX-NEXT: vpcmpleq %ymm1, %ymm0, %k1
-; VLX-NEXT: vpcmpleq %ymm2, %ymm3, %k1 {%k1}
+; VLX-NEXT: vpcmpnltq %ymm3, %ymm2, %k1 {%k1}
; VLX-NEXT: vpblendmq %ymm0, %ymm2, %ymm0 {%k1}
; VLX-NEXT: retq
;
@@ -302,7 +302,7 @@ define <4 x i64> @test256_10(<4 x i64> %
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpcmpleq %zmm1, %zmm0, %k1
-; NoVLX-NEXT: vpcmpleq %zmm2, %zmm3, %k1 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm3, %zmm2, %k1 {%k1}
; NoVLX-NEXT: vpblendmq %zmm0, %zmm2, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -343,7 +343,7 @@ define <4 x i64> @test256_11(<4 x i64> %
define <8 x i32> @test256_12(<8 x i32> %x, <8 x i32>* %y.ptr, <8 x i32> %x1, <8 x i32> %y1) nounwind {
; VLX-LABEL: test256_12:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %ymm1, %ymm2, %k1
+; VLX-NEXT: vpcmpnltd %ymm2, %ymm1, %k1
; VLX-NEXT: vpcmpleud (%rdi), %ymm0, %k1 {%k1}
; VLX-NEXT: vpblendmd %ymm0, %ymm1, %ymm0 {%k1}
; VLX-NEXT: retq
@@ -355,7 +355,7 @@ define <8 x i32> @test256_12(<8 x i32> %
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqu (%rdi), %ymm3
; NoVLX-NEXT: vpcmpleud %zmm3, %zmm0, %k1
-; NoVLX-NEXT: vpcmpled %zmm1, %zmm2, %k1 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm2, %zmm1, %k1 {%k1}
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -418,7 +418,7 @@ define <8 x i32> @test256_14(<8 x i32> %
define <8 x i32> @test256_15(<8 x i32> %x, i32* %yb.ptr, <8 x i32> %x1, <8 x i32> %y1) nounwind {
; VLX-LABEL: test256_15:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %ymm1, %ymm2, %k1
+; VLX-NEXT: vpcmpnltd %ymm2, %ymm1, %k1
; VLX-NEXT: vpcmpgtd (%rdi){1to8}, %ymm0, %k1 {%k1}
; VLX-NEXT: vpblendmd %ymm0, %ymm1, %ymm0 {%k1}
; VLX-NEXT: retq
@@ -428,7 +428,7 @@ define <8 x i32> @test256_15(<8 x i32> %
; NoVLX-NEXT: # kill: def $ymm2 killed $ymm2 def $zmm2
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm1, %zmm2, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm2, %zmm1, %k1
; NoVLX-NEXT: vpbroadcastd (%rdi), %ymm2
; NoVLX-NEXT: vpcmpgtd %zmm2, %zmm0, %k1 {%k1}
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
@@ -447,7 +447,7 @@ define <8 x i32> @test256_15(<8 x i32> %
define <4 x i64> @test256_16(<4 x i64> %x, i64* %yb.ptr, <4 x i64> %x1, <4 x i64> %y1) nounwind {
; VLX-LABEL: test256_16:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpleq %ymm1, %ymm2, %k1
+; VLX-NEXT: vpcmpnltq %ymm2, %ymm1, %k1
; VLX-NEXT: vpcmpgtq (%rdi){1to4}, %ymm0, %k1 {%k1}
; VLX-NEXT: vpblendmq %ymm0, %ymm1, %ymm0 {%k1}
; VLX-NEXT: retq
@@ -457,7 +457,7 @@ define <4 x i64> @test256_16(<4 x i64> %
; NoVLX-NEXT: # kill: def $ymm2 killed $ymm2 def $zmm2
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm1, %zmm2, %k1
+; NoVLX-NEXT: vpcmpnltq %zmm2, %zmm1, %k1
; NoVLX-NEXT: vpbroadcastq (%rdi), %ymm2
; NoVLX-NEXT: vpcmpgtq %zmm2, %zmm0, %k1 {%k1}
; NoVLX-NEXT: vpblendmq %zmm0, %zmm1, %zmm0 {%k1}
@@ -605,7 +605,7 @@ define <2 x i64> @test128_2(<2 x i64> %x
define <4 x i32> @test128_3(<4 x i32> %x, <4 x i32> %y, <4 x i32> %x1) nounwind {
; VLX-LABEL: test128_3:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k1
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k1
; VLX-NEXT: vpblendmd %xmm2, %xmm1, %xmm0 {%k1}
; VLX-NEXT: retq
;
@@ -614,7 +614,7 @@ define <4 x i32> @test128_3(<4 x i32> %x
; NoVLX-NEXT: # kill: def $xmm2 killed $xmm2 def $zmm2
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k1
; NoVLX-NEXT: vpblendmd %zmm2, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -766,7 +766,7 @@ define <4 x i32> @test128_7b(<4 x i32> %
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqu (%rdi), %xmm2
-; NoVLX-NEXT: vpcmpled %zmm2, %zmm0, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm0, %zmm2, %k1
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -850,7 +850,7 @@ define <2 x i64> @test128_10(<2 x i64> %
; VLX-LABEL: test128_10:
; VLX: # %bb.0:
; VLX-NEXT: vpcmpleq %xmm1, %xmm0, %k1
-; VLX-NEXT: vpcmpleq %xmm2, %xmm3, %k1 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm3, %xmm2, %k1 {%k1}
; VLX-NEXT: vpblendmq %xmm0, %xmm2, %xmm0 {%k1}
; VLX-NEXT: retq
;
@@ -861,7 +861,7 @@ define <2 x i64> @test128_10(<2 x i64> %
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpcmpleq %zmm1, %zmm0, %k1
-; NoVLX-NEXT: vpcmpleq %zmm2, %zmm3, %k1 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm3, %zmm2, %k1 {%k1}
; NoVLX-NEXT: vpblendmq %zmm0, %zmm2, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -902,7 +902,7 @@ define <2 x i64> @test128_11(<2 x i64> %
define <4 x i32> @test128_12(<4 x i32> %x, <4 x i32>* %y.ptr, <4 x i32> %x1, <4 x i32> %y1) nounwind {
; VLX-LABEL: test128_12:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %xmm1, %xmm2, %k1
+; VLX-NEXT: vpcmpnltd %xmm2, %xmm1, %k1
; VLX-NEXT: vpcmpleud (%rdi), %xmm0, %k1 {%k1}
; VLX-NEXT: vpblendmd %xmm0, %xmm1, %xmm0 {%k1}
; VLX-NEXT: retq
@@ -914,7 +914,7 @@ define <4 x i32> @test128_12(<4 x i32> %
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqu (%rdi), %xmm3
; NoVLX-NEXT: vpcmpleud %zmm3, %zmm0, %k1
-; NoVLX-NEXT: vpcmpled %zmm1, %zmm2, %k1 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm2, %zmm1, %k1 {%k1}
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0
; NoVLX-NEXT: retq
@@ -977,7 +977,7 @@ define <4 x i32> @test128_14(<4 x i32> %
define <4 x i32> @test128_15(<4 x i32> %x, i32* %yb.ptr, <4 x i32> %x1, <4 x i32> %y1) nounwind {
; VLX-LABEL: test128_15:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpled %xmm1, %xmm2, %k1
+; VLX-NEXT: vpcmpnltd %xmm2, %xmm1, %k1
; VLX-NEXT: vpcmpgtd (%rdi){1to4}, %xmm0, %k1 {%k1}
; VLX-NEXT: vpblendmd %xmm0, %xmm1, %xmm0 {%k1}
; VLX-NEXT: retq
@@ -987,7 +987,7 @@ define <4 x i32> @test128_15(<4 x i32> %
; NoVLX-NEXT: # kill: def $xmm2 killed $xmm2 def $zmm2
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm1, %zmm2, %k1
+; NoVLX-NEXT: vpcmpnltd %zmm2, %zmm1, %k1
; NoVLX-NEXT: vpbroadcastd (%rdi), %xmm2
; NoVLX-NEXT: vpcmpgtd %zmm2, %zmm0, %k1 {%k1}
; NoVLX-NEXT: vpblendmd %zmm0, %zmm1, %zmm0 {%k1}
@@ -1006,7 +1006,7 @@ define <4 x i32> @test128_15(<4 x i32> %
define <2 x i64> @test128_16(<2 x i64> %x, i64* %yb.ptr, <2 x i64> %x1, <2 x i64> %y1) nounwind {
; VLX-LABEL: test128_16:
; VLX: # %bb.0:
-; VLX-NEXT: vpcmpleq %xmm1, %xmm2, %k1
+; VLX-NEXT: vpcmpnltq %xmm2, %xmm1, %k1
; VLX-NEXT: vpcmpgtq (%rdi){1to2}, %xmm0, %k1 {%k1}
; VLX-NEXT: vpblendmq %xmm0, %xmm1, %xmm0 {%k1}
; VLX-NEXT: retq
@@ -1016,7 +1016,7 @@ define <2 x i64> @test128_16(<2 x i64> %
; NoVLX-NEXT: # kill: def $xmm2 killed $xmm2 def $zmm2
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm1, %zmm2, %k1
+; NoVLX-NEXT: vpcmpnltq %zmm2, %zmm1, %k1
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm2
; NoVLX-NEXT: vpcmpgtq %zmm2, %zmm0, %k1 {%k1}
; NoVLX-NEXT: vpblendmq %zmm0, %zmm1, %zmm0 {%k1}
Modified: llvm/trunk/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll?rev=325527&r1=325526&r2=325527&view=diff
==============================================================================
--- llvm/trunk/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll (original)
+++ llvm/trunk/test/CodeGen/X86/avx512vl-vec-masked-cmp.ll Mon Feb 19 11:23:35 2018
@@ -10791,7 +10791,7 @@ entry:
define zeroext i32 @test_vpcmpsgeb_v16i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeb_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleb %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltb %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -10846,7 +10846,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeb_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleb %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltb %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -10908,7 +10908,7 @@ entry:
define zeroext i64 @test_vpcmpsgeb_v16i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeb_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleb %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltb %xmm1, %xmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -10965,7 +10965,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeb_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleb %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltb %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -11027,7 +11027,7 @@ entry:
define zeroext i64 @test_vpcmpsgeb_v32i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeb_v32i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleb %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltb %ymm1, %ymm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11098,7 +11098,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeb_v32i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleb %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltb %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11182,7 +11182,7 @@ entry:
define zeroext i16 @test_vpcmpsgew_v8i1_v16i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -11241,7 +11241,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -11307,7 +11307,7 @@ entry:
define zeroext i32 @test_vpcmpsgew_v8i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -11362,7 +11362,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -11424,7 +11424,7 @@ entry:
define zeroext i64 @test_vpcmpsgew_v8i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -11481,7 +11481,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -11545,7 +11545,7 @@ entry:
define zeroext i32 @test_vpcmpsgew_v16i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltw %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11602,7 +11602,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11666,7 +11666,7 @@ entry:
define zeroext i64 @test_vpcmpsgew_v16i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltw %ymm1, %ymm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11725,7 +11725,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -11789,7 +11789,7 @@ entry:
define zeroext i64 @test_vpcmpsgew_v32i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgew_v32i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmplew %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltw %zmm1, %zmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -12118,7 +12118,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgew_v32i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmplew %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltw %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -12460,7 +12460,7 @@ entry:
define zeroext i8 @test_vpcmpsged_v4i1_v8i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v4i1_v8i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: retq
@@ -12469,7 +12469,7 @@ define zeroext i8 @test_vpcmpsged_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12497,7 +12497,7 @@ define zeroext i8 @test_vpcmpsged_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12518,7 +12518,7 @@ define zeroext i8 @test_masked_vpcmpsged
; VLX-LABEL: test_masked_vpcmpsged_v4i1_v8i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: retq
@@ -12528,7 +12528,7 @@ define zeroext i8 @test_masked_vpcmpsged
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12561,7 +12561,7 @@ define zeroext i8 @test_masked_vpcmpsged
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12594,7 +12594,7 @@ define zeroext i8 @test_vpcmpsged_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12626,7 +12626,7 @@ define zeroext i8 @test_masked_vpcmpsged
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12651,7 +12651,7 @@ entry:
define zeroext i16 @test_vpcmpsged_v4i1_v16i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v4i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -12660,7 +12660,7 @@ define zeroext i16 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12688,7 +12688,7 @@ define zeroext i16 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12709,7 +12709,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v4i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -12719,7 +12719,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12752,7 +12752,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12785,7 +12785,7 @@ define zeroext i16 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12817,7 +12817,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12842,7 +12842,7 @@ entry:
define zeroext i32 @test_vpcmpsged_v4i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v4i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -12850,7 +12850,7 @@ define zeroext i32 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12876,7 +12876,7 @@ define zeroext i32 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12896,7 +12896,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v4i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -12905,7 +12905,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12936,7 +12936,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12967,7 +12967,7 @@ define zeroext i32 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -12997,7 +12997,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13021,7 +13021,7 @@ entry:
define zeroext i64 @test_vpcmpsged_v4i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v4i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -13029,7 +13029,7 @@ define zeroext i64 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13056,7 +13056,7 @@ define zeroext i64 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13077,7 +13077,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v4i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -13086,7 +13086,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13118,7 +13118,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13150,7 +13150,7 @@ define zeroext i64 @test_vpcmpsged_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13181,7 +13181,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13206,7 +13206,7 @@ entry:
define zeroext i16 @test_vpcmpsged_v8i1_v16i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -13216,7 +13216,7 @@ define zeroext i16 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13245,7 +13245,7 @@ define zeroext i16 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13266,7 +13266,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -13277,7 +13277,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13310,7 +13310,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13343,7 +13343,7 @@ define zeroext i16 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13376,7 +13376,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13400,7 +13400,7 @@ entry:
define zeroext i32 @test_vpcmpsged_v8i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -13409,7 +13409,7 @@ define zeroext i32 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13436,7 +13436,7 @@ define zeroext i32 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13456,7 +13456,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -13466,7 +13466,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13497,7 +13497,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13528,7 +13528,7 @@ define zeroext i32 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13559,7 +13559,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13582,7 +13582,7 @@ entry:
define zeroext i64 @test_vpcmpsged_v8i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -13591,7 +13591,7 @@ define zeroext i64 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13619,7 +13619,7 @@ define zeroext i64 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13640,7 +13640,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -13650,7 +13650,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13682,7 +13682,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13714,7 +13714,7 @@ define zeroext i64 @test_vpcmpsged_v8i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13746,7 +13746,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastd (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $8, %k0, %k0
; NoVLX-NEXT: kshiftrw $8, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -13770,14 +13770,14 @@ entry:
define zeroext i32 @test_vpcmpsged_v16i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_vpcmpsged_v16i1_v32i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
@@ -13818,14 +13818,14 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v16i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_masked_vpcmpsged_v16i1_v32i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: andl %edi, %eax
; NoVLX-NEXT: vzeroupper
@@ -13928,14 +13928,14 @@ entry:
define zeroext i64 @test_vpcmpsged_v16i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsged_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_vpcmpsged_v16i1_v64i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: movzwl %ax, %eax
; NoVLX-NEXT: vzeroupper
@@ -13978,14 +13978,14 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsged_v16i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpled %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_masked_vpcmpsged_v16i1_v64i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpled %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltd %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: andl %edi, %eax
; NoVLX-NEXT: vzeroupper
@@ -14089,7 +14089,7 @@ entry:
define zeroext i4 @test_vpcmpsgeq_v2i1_v4i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v2i1_v4i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0
; VLX-NEXT: kmovb %k0, %eax
; VLX-NEXT: retq
;
@@ -14097,7 +14097,7 @@ define zeroext i4 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14125,7 +14125,7 @@ define zeroext i4 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14147,7 +14147,7 @@ define zeroext i4 @test_masked_vpcmpsgeq
; VLX-LABEL: test_masked_vpcmpsgeq_v2i1_v4i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovb %k0, %eax
; VLX-NEXT: retq
;
@@ -14156,7 +14156,7 @@ define zeroext i4 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14189,7 +14189,7 @@ define zeroext i4 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14222,7 +14222,7 @@ define zeroext i4 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14254,7 +14254,7 @@ define zeroext i4 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14280,7 +14280,7 @@ entry:
define zeroext i8 @test_vpcmpsgeq_v2i1_v8i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v2i1_v8i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: retq
@@ -14289,7 +14289,7 @@ define zeroext i8 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14317,7 +14317,7 @@ define zeroext i8 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14338,7 +14338,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; VLX-LABEL: test_masked_vpcmpsgeq_v2i1_v8i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: retq
@@ -14348,7 +14348,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14381,7 +14381,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14414,7 +14414,7 @@ define zeroext i8 @test_vpcmpsgeq_v2i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14446,7 +14446,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14471,7 +14471,7 @@ entry:
define zeroext i16 @test_vpcmpsgeq_v2i1_v16i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v2i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -14480,7 +14480,7 @@ define zeroext i16 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14508,7 +14508,7 @@ define zeroext i16 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14529,7 +14529,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v2i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: retq
@@ -14539,7 +14539,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14572,7 +14572,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14605,7 +14605,7 @@ define zeroext i16 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14637,7 +14637,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14662,7 +14662,7 @@ entry:
define zeroext i32 @test_vpcmpsgeq_v2i1_v32i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v2i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -14670,7 +14670,7 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14696,7 +14696,7 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14716,7 +14716,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v2i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: retq
;
@@ -14725,7 +14725,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14756,7 +14756,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14787,7 +14787,7 @@ define zeroext i32 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14817,7 +14817,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14841,7 +14841,7 @@ entry:
define zeroext i64 @test_vpcmpsgeq_v2i1_v64i1_mask(<2 x i64> %__a, <2 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v2i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -14849,7 +14849,7 @@ define zeroext i64 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14876,7 +14876,7 @@ define zeroext i64 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14897,7 +14897,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v2i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %xmm0, %xmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %xmm1, %xmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: retq
;
@@ -14906,7 +14906,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm1 killed $xmm1 def $zmm1
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14938,7 +14938,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -14970,7 +14970,7 @@ define zeroext i64 @test_vpcmpsgeq_v2i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %xmm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15001,7 +15001,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $xmm0 killed $xmm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %xmm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $14, %k0, %k0
; NoVLX-NEXT: kshiftrw $14, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15026,7 +15026,7 @@ entry:
define zeroext i8 @test_vpcmpsgeq_v4i1_v8i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v4i1_v8i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: vzeroupper
@@ -15036,7 +15036,7 @@ define zeroext i8 @test_vpcmpsgeq_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15065,7 +15065,7 @@ define zeroext i8 @test_vpcmpsgeq_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15086,7 +15086,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; VLX-LABEL: test_masked_vpcmpsgeq_v4i1_v8i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $al killed $al killed $eax
; VLX-NEXT: vzeroupper
@@ -15097,7 +15097,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15131,7 +15131,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15165,7 +15165,7 @@ define zeroext i8 @test_vpcmpsgeq_v4i1_v
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15198,7 +15198,7 @@ define zeroext i8 @test_masked_vpcmpsgeq
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15223,7 +15223,7 @@ entry:
define zeroext i16 @test_vpcmpsgeq_v4i1_v16i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v4i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -15233,7 +15233,7 @@ define zeroext i16 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15262,7 +15262,7 @@ define zeroext i16 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15283,7 +15283,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v4i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -15294,7 +15294,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15328,7 +15328,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15362,7 +15362,7 @@ define zeroext i16 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15395,7 +15395,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15420,7 +15420,7 @@ entry:
define zeroext i32 @test_vpcmpsgeq_v4i1_v32i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v4i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -15429,7 +15429,7 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15456,7 +15456,7 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15476,7 +15476,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v4i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -15486,7 +15486,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15518,7 +15518,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15550,7 +15550,7 @@ define zeroext i32 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15581,7 +15581,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15605,7 +15605,7 @@ entry:
define zeroext i64 @test_vpcmpsgeq_v4i1_v64i1_mask(<4 x i64> %__a, <4 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v4i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -15614,7 +15614,7 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15642,7 +15642,7 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15663,7 +15663,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v4i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %ymm0, %ymm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %ymm1, %ymm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -15673,7 +15673,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm1 killed $ymm1 def $zmm1
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15706,7 +15706,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vmovdqa (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15739,7 +15739,7 @@ define zeroext i64 @test_vpcmpsgeq_v4i1_
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rdi), %ymm1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15771,7 +15771,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-NEXT: # kill: def $ymm0 killed $ymm0 def $zmm0
; NoVLX-NEXT: vpbroadcastq (%rsi), %ymm1
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kshiftlw $12, %k0, %k0
; NoVLX-NEXT: kshiftrw $12, %k0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
@@ -15796,7 +15796,7 @@ entry:
define zeroext i16 @test_vpcmpsgeq_v8i1_v16i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -15804,7 +15804,7 @@ define zeroext i16 @test_vpcmpsgeq_v8i1_
;
; NoVLX-LABEL: test_vpcmpsgeq_v8i1_v16i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: # kill: def $ax killed $ax killed $eax
; NoVLX-NEXT: vzeroupper
@@ -15848,7 +15848,7 @@ define zeroext i16 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v8i1_v16i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: # kill: def $ax killed $ax killed $eax
; VLX-NEXT: vzeroupper
@@ -15857,7 +15857,7 @@ define zeroext i16 @test_masked_vpcmpsge
; NoVLX-LABEL: test_masked_vpcmpsgeq_v8i1_v16i1_mask:
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: # kill: def $ax killed $ax killed $eax
; NoVLX-NEXT: vzeroupper
@@ -15966,14 +15966,14 @@ entry:
define zeroext i32 @test_vpcmpsgeq_v8i1_v32i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_vpcmpsgeq_v8i1_v32i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
@@ -16014,7 +16014,7 @@ define zeroext i32 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v8i1_v32i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovd %k0, %eax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -16022,7 +16022,7 @@ define zeroext i32 @test_masked_vpcmpsge
; NoVLX-LABEL: test_masked_vpcmpsgeq_v8i1_v32i1_mask:
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: vzeroupper
; NoVLX-NEXT: retq
@@ -16124,14 +16124,14 @@ entry:
define zeroext i64 @test_vpcmpsgeq_v8i1_v64i1_mask(<8 x i64> %__a, <8 x i64> %__b) local_unnamed_addr {
; VLX-LABEL: test_vpcmpsgeq_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
;
; NoVLX-LABEL: test_vpcmpsgeq_v8i1_v64i1_mask:
; NoVLX: # %bb.0: # %entry
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: movzwl %ax, %eax
; NoVLX-NEXT: vzeroupper
@@ -16174,7 +16174,7 @@ define zeroext i64 @test_masked_vpcmpsge
; VLX-LABEL: test_masked_vpcmpsgeq_v8i1_v64i1_mask:
; VLX: # %bb.0: # %entry
; VLX-NEXT: kmovd %edi, %k1
-; VLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; VLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; VLX-NEXT: kmovq %k0, %rax
; VLX-NEXT: vzeroupper
; VLX-NEXT: retq
@@ -16182,7 +16182,7 @@ define zeroext i64 @test_masked_vpcmpsge
; NoVLX-LABEL: test_masked_vpcmpsgeq_v8i1_v64i1_mask:
; NoVLX: # %bb.0: # %entry
; NoVLX-NEXT: kmovw %edi, %k1
-; NoVLX-NEXT: vpcmpleq %zmm0, %zmm1, %k0 {%k1}
+; NoVLX-NEXT: vpcmpnltq %zmm1, %zmm0, %k0 {%k1}
; NoVLX-NEXT: kmovw %k0, %eax
; NoVLX-NEXT: movzwl %ax, %eax
; NoVLX-NEXT: vzeroupper
More information about the llvm-commits
mailing list