[llvm] 8fd6fc7 - [X86] avx-vbroadcastf128.ll - use X86 check prefix instead of X32

Simon Pilgrim via llvm-commits llvm-commits at lists.llvm.org
Sun Jan 22 07:11:01 PST 2023


Author: Simon Pilgrim
Date: 2023-01-22T14:57:06Z
New Revision: 8fd6fc78aec88d1662236b75b5ecc9a62d50a837

URL: https://github.com/llvm/llvm-project/commit/8fd6fc78aec88d1662236b75b5ecc9a62d50a837
DIFF: https://github.com/llvm/llvm-project/commit/8fd6fc78aec88d1662236b75b5ecc9a62d50a837.diff

LOG: [X86] avx-vbroadcastf128.ll - use X86 check prefix instead of X32

We try to reserve the X32 check prefix for tests on gnux32 triples, so plain i686 runs should use X86 instead

Added: 
    

Modified: 
    llvm/test/CodeGen/X86/avx-vbroadcastf128.ll

Removed: 
    


################################################################################
diff --git a/llvm/test/CodeGen/X86/avx-vbroadcastf128.ll b/llvm/test/CodeGen/X86/avx-vbroadcastf128.ll
index fdd3e1ab06b3..9434bc22d997 100644
--- a/llvm/test/CodeGen/X86/avx-vbroadcastf128.ll
+++ b/llvm/test/CodeGen/X86/avx-vbroadcastf128.ll
@@ -1,13 +1,13 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
-; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=X32
+; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=X86
 ; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+avx | FileCheck %s --check-prefix=X64
 
 define <4 x double> @test_broadcast_2f64_4f64(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_2f64_4f64:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_2f64_4f64:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_2f64_4f64:
 ; X64:       # %bb.0:
@@ -19,11 +19,11 @@ define <4 x double> @test_broadcast_2f64_4f64(ptr%p) nounwind {
 }
 
 define <4 x i64> @test_broadcast_2i64_4i64(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_2i64_4i64:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_2i64_4i64:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_2i64_4i64:
 ; X64:       # %bb.0:
@@ -35,11 +35,11 @@ define <4 x i64> @test_broadcast_2i64_4i64(ptr%p) nounwind {
 }
 
 define <8 x float> @test_broadcast_4f32_8f32(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_4f32_8f32:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_4f32_8f32:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_4f32_8f32:
 ; X64:       # %bb.0:
@@ -51,11 +51,11 @@ define <8 x float> @test_broadcast_4f32_8f32(ptr%p) nounwind {
 }
 
 define <8 x i32> @test_broadcast_4i32_8i32(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_4i32_8i32:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_4i32_8i32:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_4i32_8i32:
 ; X64:       # %bb.0:
@@ -67,11 +67,11 @@ define <8 x i32> @test_broadcast_4i32_8i32(ptr%p) nounwind {
 }
 
 define <16 x i16> @test_broadcast_8i16_16i16(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_8i16_16i16:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_8i16_16i16:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_8i16_16i16:
 ; X64:       # %bb.0:
@@ -83,11 +83,11 @@ define <16 x i16> @test_broadcast_8i16_16i16(ptr%p) nounwind {
 }
 
 define <32 x i8> @test_broadcast_16i8_32i8(ptr%p) nounwind {
-; X32-LABEL: test_broadcast_16i8_32i8:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_16i8_32i8:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_16i8_32i8:
 ; X64:       # %bb.0:
@@ -102,14 +102,14 @@ define <32 x i8> @test_broadcast_16i8_32i8(ptr%p) nounwind {
 ; Don't limit the transform based on extra uses of the load itself (the store is a user of the load's chain value).
 
 define void @subv_reuse_is_ok(ptr %a, ptr %b) {
-; X32-LABEL: subv_reuse_is_ok:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovups %ymm0, (%eax)
-; X32-NEXT:    vzeroupper
-; X32-NEXT:    retl
+; X86-LABEL: subv_reuse_is_ok:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovups %ymm0, (%eax)
+; X86-NEXT:    vzeroupper
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: subv_reuse_is_ok:
 ; X64:       # %bb.0:
@@ -124,13 +124,13 @@ define void @subv_reuse_is_ok(ptr %a, ptr %b) {
 }
 
 define <4 x double> @test_broadcast_2f64_4f64_reuse(ptr %p0, ptr %p1) {
-; X32-LABEL: test_broadcast_2f64_4f64_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_2f64_4f64_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_2f64_4f64_reuse:
 ; X64:       # %bb.0:
@@ -144,13 +144,13 @@ define <4 x double> @test_broadcast_2f64_4f64_reuse(ptr %p0, ptr %p1) {
 }
 
 define <4 x i64> @test_broadcast_2i64_4i64_reuse(ptr %p0, ptr %p1) {
-; X32-LABEL: test_broadcast_2i64_4i64_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_2i64_4i64_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_2i64_4i64_reuse:
 ; X64:       # %bb.0:
@@ -164,13 +164,13 @@ define <4 x i64> @test_broadcast_2i64_4i64_reuse(ptr %p0, ptr %p1) {
 }
 
 define <8 x float> @test_broadcast_4f32_8f32_reuse(ptr %p0, ptr %p1) {
-; X32-LABEL: test_broadcast_4f32_8f32_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_4f32_8f32_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_4f32_8f32_reuse:
 ; X64:       # %bb.0:
@@ -184,13 +184,13 @@ define <8 x float> @test_broadcast_4f32_8f32_reuse(ptr %p0, ptr %p1) {
 }
 
 define <8 x i32> @test_broadcast_4i32_8i32_reuse(ptr %p0, ptr %p1) {
-; X32-LABEL: test_broadcast_4i32_8i32_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_4i32_8i32_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_4i32_8i32_reuse:
 ; X64:       # %bb.0:
@@ -204,13 +204,13 @@ define <8 x i32> @test_broadcast_4i32_8i32_reuse(ptr %p0, ptr %p1) {
 }
 
 define <16 x i16> @test_broadcast_8i16_16i16_reuse(ptr%p0, ptr%p1) nounwind {
-; X32-LABEL: test_broadcast_8i16_16i16_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_8i16_16i16_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_8i16_16i16_reuse:
 ; X64:       # %bb.0:
@@ -224,13 +224,13 @@ define <16 x i16> @test_broadcast_8i16_16i16_reuse(ptr%p0, ptr%p1) nounwind {
 }
 
 define <32 x i8> @test_broadcast_16i8_32i8_reuse(ptr%p0, ptr%p1) nounwind {
-; X32-LABEL: test_broadcast_16i8_32i8_reuse:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %xmm0, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: test_broadcast_16i8_32i8_reuse:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %xmm0, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: test_broadcast_16i8_32i8_reuse:
 ; X64:       # %bb.0:
@@ -244,14 +244,14 @@ define <32 x i8> @test_broadcast_16i8_32i8_reuse(ptr%p0, ptr%p1) nounwind {
 }
 
 define <8 x i32> @PR29088(ptr %p0, ptr %p1) {
-; X32-LABEL: PR29088:
-; X32:       # %bb.0:
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
-; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
-; X32-NEXT:    vxorps %xmm1, %xmm1, %xmm1
-; X32-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
-; X32-NEXT:    vmovaps %ymm1, (%eax)
-; X32-NEXT:    retl
+; X86-LABEL: PR29088:
+; X86:       # %bb.0:
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
+; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
+; X86-NEXT:    vxorps %xmm1, %xmm1, %xmm1
+; X86-NEXT:    vbroadcastf128 {{.*#+}} ymm0 = mem[0,1,0,1]
+; X86-NEXT:    vmovaps %ymm1, (%eax)
+; X86-NEXT:    retl
 ;
 ; X64-LABEL: PR29088:
 ; X64:       # %bb.0:


        


More information about the llvm-commits mailing list