[llvm] [AMDGPU] Avoid undefs in hazard-gfx1250-flat-scr-hi.mir. NFC (PR #170396)
Stanislav Mekhanoshin via llvm-commits
llvm-commits at lists.llvm.org
Tue Dec 2 17:53:16 PST 2025
https://github.com/rampitec updated https://github.com/llvm/llvm-project/pull/170396
From ca750bddacbffd93a9443d79fdafa50333b65506 Mon Sep 17 00:00:00 2001
From: Stanislav Mekhanoshin <Stanislav.Mekhanoshin at amd.com>
Date: Tue, 2 Dec 2025 16:37:01 -0800
Subject: [PATCH] [AMDGPU] Avoid undefs in hazard-gfx1250-flat-scr-hi.mir. NFC
---
.../AMDGPU/hazard-gfx1250-flat-scr-hi.mir | 89 ++++++++++++-------
1 file changed, 57 insertions(+), 32 deletions(-)
diff --git a/llvm/test/CodeGen/AMDGPU/hazard-gfx1250-flat-scr-hi.mir b/llvm/test/CodeGen/AMDGPU/hazard-gfx1250-flat-scr-hi.mir
index e3b28c5518695..e98c08248af75 100644
--- a/llvm/test/CodeGen/AMDGPU/hazard-gfx1250-flat-scr-hi.mir
+++ b/llvm/test/CodeGen/AMDGPU/hazard-gfx1250-flat-scr-hi.mir
@@ -8,10 +8,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_ashr_i64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_ASHR_I64_:%[0-9]+]]:sreg_64 = S_ASHR_I64 undef %2:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: [[S_ASHR_I64_:%[0-9]+]]:sreg_64 = S_ASHR_I64 [[DEF]], [[COPY]], implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- %2:sreg_64 = S_ASHR_I64 undef %1:sreg_64, %0, implicit-def $scc
+ %2:sreg_64 = S_ASHR_I64 %1:sreg_64, %0, implicit-def $scc
...
---
@@ -21,10 +23,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_lshl_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_LSHL_B64_:%[0-9]+]]:sreg_64 = S_LSHL_B64 undef %2:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: [[S_LSHL_B64_:%[0-9]+]]:sreg_64 = S_LSHL_B64 [[DEF]], [[COPY]], implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- %2:sreg_64 = S_LSHL_B64 undef %1:sreg_64, %0, implicit-def $scc
+ %2:sreg_64 = S_LSHL_B64 %1:sreg_64, %0, implicit-def $scc
...
---
@@ -34,10 +38,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_lshr_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_LSHR_B64_:%[0-9]+]]:sreg_64 = S_LSHR_B64 undef %2:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: [[S_LSHR_B64_:%[0-9]+]]:sreg_64 = S_LSHR_B64 [[DEF]], [[COPY]], implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- %2:sreg_64 = S_LSHR_B64 undef %1:sreg_64, %0, implicit-def $scc
+ %2:sreg_64 = S_LSHR_B64 %1:sreg_64, %0, implicit-def $scc
...
---
@@ -47,10 +53,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_bfe_i64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_BFE_I64_:%[0-9]+]]:sreg_64 = S_BFE_I64 undef %2:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: [[S_BFE_I64_:%[0-9]+]]:sreg_64 = S_BFE_I64 [[DEF]], [[COPY]], implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- %2:sreg_64 = S_BFE_I64 undef %1:sreg_64, %0, implicit-def $scc
+ %2:sreg_64 = S_BFE_I64 %1:sreg_64, %0, implicit-def $scc
...
---
@@ -60,10 +68,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_bfe_u64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_BFE_U64_:%[0-9]+]]:sreg_64 = S_BFE_U64 undef %2:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: [[S_BFE_U64_:%[0-9]+]]:sreg_64 = S_BFE_U64 [[DEF]], [[COPY]], implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- %2:sreg_64 = S_BFE_U64 undef %1:sreg_64, %0, implicit-def $scc
+ %2:sreg_64 = S_BFE_U64 %1:sreg_64, %0, implicit-def $scc
...
---
@@ -86,10 +96,14 @@ body: |
bb.0:
; GCN-LABEL: name: s_bitcmp0_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: S_BITCMP0_B64 undef %1:sreg_64, [[COPY]], implicit undef $scc, implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: $scc = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: S_BITCMP0_B64 [[DEF]], [[COPY]], implicit $scc, implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
+ $scc = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- S_BITCMP0_B64 undef %1:sreg_64, %0, implicit undef $scc, implicit-def $scc
+ S_BITCMP0_B64 %1:sreg_64, %0, implicit $scc, implicit-def $scc
...
---
@@ -99,10 +113,14 @@ body: |
bb.0:
; GCN-LABEL: name: s_bitcmp1_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: S_BITCMP1_B64 undef %1:sreg_64, [[COPY]], implicit undef $scc, implicit-def $scc
+ ; GCN: [[DEF:%[0-9]+]]:sreg_64 = IMPLICIT_DEF
+ ; GCN-NEXT: $scc = IMPLICIT_DEF
+ ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: S_BITCMP1_B64 [[DEF]], [[COPY]], implicit $scc, implicit-def $scc
+ %1:sreg_64 = IMPLICIT_DEF
+ $scc = IMPLICIT_DEF
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- S_BITCMP1_B64 undef %1:sreg_64, %0, implicit undef $scc, implicit-def $scc
+ S_BITCMP1_B64 %1:sreg_64, %0, implicit $scc, implicit-def $scc
...
---
@@ -125,10 +143,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_bitset0_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_BITSET0_B64_:%[0-9]+]]:sreg_64 = S_BITSET0_B64 [[COPY]], undef [[S_BITSET0_B64_]], implicit-def $scc
- %0:sreg_32 = COPY $src_flat_scratch_base_hi
- %1:sreg_64 = S_BITSET0_B64 %0, undef %1:sreg_64, implicit-def $scc
+ ; GCN: $sgpr0_sgpr1 = IMPLICIT_DEF
+ ; GCN-NEXT: $sgpr2 = S_MOV_B32 $src_flat_scratch_base_hi
+ ; GCN-NEXT: $sgpr0_sgpr1 = S_BITSET0_B64 $sgpr2, $sgpr0_sgpr1, implicit-def $scc
+ $sgpr0_sgpr1 = IMPLICIT_DEF
+ $sgpr2 = S_MOV_B32 $src_flat_scratch_base_hi
+ $sgpr0_sgpr1 = S_BITSET0_B64 $sgpr2, $sgpr0_sgpr1, implicit-def $scc
...
---
@@ -138,10 +158,12 @@ body: |
bb.0:
; GCN-LABEL: name: s_bitset1_b64
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: [[S_BITSET1_B64_:%[0-9]+]]:sreg_64 = S_BITSET1_B64 [[COPY]], undef [[S_BITSET1_B64_]], implicit-def $scc
- %0:sreg_32 = COPY $src_flat_scratch_base_hi
- %1:sreg_64 = S_BITSET1_B64 %0, undef %1:sreg_64, implicit-def $scc
+ ; GCN: $sgpr0_sgpr1 = IMPLICIT_DEF
+ ; GCN-NEXT: $sgpr2 = S_MOV_B32 $src_flat_scratch_base_hi
+ ; GCN-NEXT: $sgpr0_sgpr1 = S_BITSET1_B64 $sgpr2, $sgpr0_sgpr1, implicit-def $scc
+ $sgpr0_sgpr1 = IMPLICIT_DEF
+ $sgpr2 = S_MOV_B32 $src_flat_scratch_base_hi
+ $sgpr0_sgpr1 = S_BITSET1_B64 $sgpr2, $sgpr0_sgpr1, implicit-def $scc
...
---
@@ -151,8 +173,11 @@ body: |
bb.0:
; GCN-LABEL: name: s_ashr_i64_phys_dst
- ; GCN: [[COPY:%[0-9]+]]:sreg_32 = COPY $src_flat_scratch_base_hi
- ; GCN-NEXT: $sgpr0_sgpr1 = S_ASHR_I64 undef %1:sreg_64, [[COPY]], implicit-def $scc
+ ; GCN: $sgpr0_sgpr1 = IMPLICIT_DEF
+ ; GCN-NEXT: $sgpr2 = COPY $src_flat_scratch_base_hi
+ ; GCN-NEXT: $sgpr0_sgpr1 = S_ASHR_I64 $sgpr0_sgpr1, $sgpr2, implicit-def $scc
+ $sgpr0_sgpr1 = IMPLICIT_DEF
+ $sgpr2 = COPY $src_flat_scratch_base_hi
%0:sreg_32 = COPY $src_flat_scratch_base_hi
- $sgpr0_sgpr1 = S_ASHR_I64 undef %1:sreg_64, %0, implicit-def $scc
+ $sgpr0_sgpr1 = S_ASHR_I64 $sgpr0_sgpr1, $sgpr2, implicit-def $scc
...