[llvm] 4f19a5c - [X86] avg-mask.ll - regenerate test checks for TERNLOG comments
Simon Pilgrim via llvm-commits
llvm-commits at lists.llvm.org
Thu May 15 02:14:12 PDT 2025
Author: Simon Pilgrim
Date: 2025-05-15T10:13:50+01:00
New Revision: 4f19a5c050b0f18703198d839f135a7800306eb5
URL: https://github.com/llvm/llvm-project/commit/4f19a5c050b0f18703198d839f135a7800306eb5
DIFF: https://github.com/llvm/llvm-project/commit/4f19a5c050b0f18703198d839f135a7800306eb5.diff
LOG: [X86] avg-mask.ll - regenerate test checks for TERNLOG comments
Reduces diffs in an upcoming patch.
Added:
Modified:
llvm/test/CodeGen/X86/avg-mask.ll
Removed:
################################################################################
diff --git a/llvm/test/CodeGen/X86/avg-mask.ll b/llvm/test/CodeGen/X86/avg-mask.ll
index ace422e1a925f..b148cd3d42df6 100644
--- a/llvm/test/CodeGen/X86/avg-mask.ll
+++ b/llvm/test/CodeGen/X86/avg-mask.ll
@@ -7,7 +7,7 @@ define <16 x i8> @avg_v16i8_mask(<16 x i8> %a, <16 x i8> %b, <16 x i8> %src, i16
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgb %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
; AVX512F-NEXT: vpblendvb %xmm1, %xmm0, %xmm2, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -35,7 +35,7 @@ define <16 x i8> @avg_v16i8_maskz(<16 x i8> %a, <16 x i8> %b, i16 %mask) nounwin
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgb %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
; AVX512F-NEXT: vpand %xmm0, %xmm1, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -64,9 +64,9 @@ define <32 x i8> @avg_v32i8_mask(<32 x i8> %a, <32 x i8> %b, <32 x i8> %src, i32
; AVX512F-NEXT: shrl $16, %edi
; AVX512F-NEXT: vpavgb %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k2
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm1, %ymm1
; AVX512F-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
@@ -96,9 +96,9 @@ define <32 x i8> @avg_v32i8_maskz(<32 x i8> %a, <32 x i8> %b, i32 %mask) nounwin
; AVX512F-NEXT: shrl $16, %edi
; AVX512F-NEXT: vpavgb %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k2
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
; AVX512F-NEXT: vpand %ymm0, %ymm1, %ymm0
@@ -137,18 +137,18 @@ define <64 x i8> @avg_v64i8_mask(<64 x i8> %a, <64 x i8> %b, <64 x i8> %src, i64
; AVX512F-NEXT: kmovw %ecx, %k2
; AVX512F-NEXT: kmovw %eax, %k3
; AVX512F-NEXT: kmovw %edi, %k4
-; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k4} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm0 {%k4} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm0, %xmm0
-; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k3} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k3} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm0, %ymm0
-; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
-; AVX512F-NEXT: vpternlogd $255, %zmm4, %zmm4, %zmm4 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm4 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm4, %xmm4
; AVX512F-NEXT: vinserti128 $1, %xmm4, %ymm3, %ymm3
; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm3, %zmm0
-; AVX512F-NEXT: vpternlogq $202, %zmm2, %zmm1, %zmm0
+; AVX512F-NEXT: vpternlogq {{.*#+}} zmm0 = zmm2 ^ (zmm0 & (zmm1 ^ zmm2))
; AVX512F-NEXT: retq
;
; AVX512BWVL-LABEL: avg_v64i8_mask:
@@ -185,14 +185,14 @@ define <64 x i8> @avg_v64i8_maskz(<64 x i8> %a, <64 x i8> %b, i64 %mask) nounwin
; AVX512F-NEXT: kmovw %ecx, %k2
; AVX512F-NEXT: kmovw %eax, %k3
; AVX512F-NEXT: kmovw %edi, %k4
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k4} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k4} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm1, %xmm1
-; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k3} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k3} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
; AVX512F-NEXT: vinserti128 $1, %xmm2, %ymm1, %ymm1
-; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm2, %xmm2
-; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdb %zmm3, %xmm3
; AVX512F-NEXT: vinserti128 $1, %xmm3, %ymm2, %ymm2
; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm2, %zmm1
@@ -220,7 +220,7 @@ define <8 x i16> @avg_v8i16_mask(<8 x i16> %a, <8 x i16> %b, <8 x i16> %src, i8
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpblendvb %xmm1, %xmm0, %xmm2, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -248,7 +248,7 @@ define <8 x i16> @avg_v8i16_maskz(<8 x i16> %a, <8 x i16> %b, i8 %mask) nounwind
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %xmm1, %xmm0, %xmm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpand %xmm0, %xmm1, %xmm0
; AVX512F-NEXT: vzeroupper
@@ -275,7 +275,7 @@ define <16 x i16> @avg_v16i16_mask(<16 x i16> %a, <16 x i16> %b, <16 x i16> %src
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpblendvb %ymm1, %ymm0, %ymm2, %ymm0
; AVX512F-NEXT: retq
@@ -302,7 +302,7 @@ define <16 x i16> @avg_v16i16_maskz(<16 x i16> %a, <16 x i16> %b, i16 %mask) nou
; AVX512F: # %bb.0:
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: kmovw %edi, %k1
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
; AVX512F-NEXT: vpand %ymm0, %ymm1, %ymm0
; AVX512F-NEXT: retq
@@ -334,12 +334,12 @@ define <32 x i16> @avg_v32i16_mask(<32 x i16> %a, <32 x i16> %b, <32 x i16> %src
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm1
; AVX512F-NEXT: kmovw %edi, %k2
-; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm0 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512F-NEXT: vpternlogd $255, %zmm3, %zmm3, %zmm3 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm3 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm3, %ymm3
; AVX512F-NEXT: vinserti64x4 $1, %ymm3, %zmm0, %zmm0
-; AVX512F-NEXT: vpternlogq $202, %zmm2, %zmm1, %zmm0
+; AVX512F-NEXT: vpternlogq {{.*#+}} zmm0 = zmm2 ^ (zmm0 & (zmm1 ^ zmm2))
; AVX512F-NEXT: retq
;
; AVX512BWVL-LABEL: avg_v32i16_mask:
@@ -370,9 +370,9 @@ define <32 x i16> @avg_v32i16_maskz(<32 x i16> %a, <32 x i16> %b, i32 %mask) nou
; AVX512F-NEXT: vpavgw %ymm1, %ymm0, %ymm0
; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm0, %zmm0
; AVX512F-NEXT: kmovw %edi, %k2
-; AVX512F-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 {%k1} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm1 {%k1} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm1, %ymm1
-; AVX512F-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 {%k2} {z}
+; AVX512F-NEXT: vpternlogd {{.*#+}} zmm2 {%k2} {z} = -1
; AVX512F-NEXT: vpmovdw %zmm2, %ymm2
; AVX512F-NEXT: vinserti64x4 $1, %ymm2, %zmm1, %zmm1
; AVX512F-NEXT: vpandq %zmm0, %zmm1, %zmm0
More information about the llvm-commits
mailing list