[llvm] AMDGPU: Add more tests for peephole-opt immediate folding (PR #127480)

Matt Arsenault via llvm-commits llvm-commits at lists.llvm.org
Mon Feb 17 19:28:59 PST 2025


https://github.com/arsenm updated https://github.com/llvm/llvm-project/pull/127480

From 2c89714aaa2793cb2e7d6ffa12f2f382b7a83d88 Mon Sep 17 00:00:00 2001
From: Matt Arsenault <Matthew.Arsenault at amd.com>
Date: Mon, 17 Feb 2025 13:32:43 +0700
Subject: [PATCH] AMDGPU: Add more tests for peephole-opt immediate folding

---
 .../test/CodeGen/AMDGPU/peephole-fold-imm.mir | 214 ++++++++++++++++++
 1 file changed, 214 insertions(+)

diff --git a/llvm/test/CodeGen/AMDGPU/peephole-fold-imm.mir b/llvm/test/CodeGen/AMDGPU/peephole-fold-imm.mir
index d070a8ef5dd2d..cceed6fd008e4 100644
--- a/llvm/test/CodeGen/AMDGPU/peephole-fold-imm.mir
+++ b/llvm/test/CodeGen/AMDGPU/peephole-fold-imm.mir
@@ -344,3 +344,217 @@ body:             |
     %3:vgpr_32 = V_FMA_F32_e64 0, %0, 0, %1, 0, %2.sub1, 0, 0, implicit $mode, implicit $exec
     SI_RETURN_TO_EPILOG %3
 ...
+
+---
+name:            fold_aimm_virtual
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_aimm_virtual
+    ; GCN: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_ACCVGPR_WRITE_B32_e64_]]
+    %0:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
+    %1:agpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG implicit %1
+
+...
+
+---
+name:            fold_aimm_virtual_copy_to_vgpr
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_aimm_virtual_copy_to_vgpr
+    ; GCN: [[V_ACCVGPR_WRITE_B32_e64_:%[0-9]+]]:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
+    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 64, implicit $exec
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B32_e32_]]
+    %0:agpr_32 = V_ACCVGPR_WRITE_B32_e64 64, implicit $exec
+    %1:vgpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG implicit %1
+
+...
+
+---
+name:            fold_v_mov_b64_64_sub0_to_vgpr_32
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_mov_b64_64_sub0_to_vgpr_32
+    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 -1412567312, implicit $exec
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
+    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    %1:vgpr_32 = COPY killed %0.sub0
+    SI_RETURN_TO_EPILOG %1
+
+...
+
+---
+name:            fold_v_mov_b64_64_sub1_to_vgpr_32
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_mov_b64_64_sub1_to_vgpr_32
+    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    ; GCN-NEXT: [[V_MOV_B32_e32_:%[0-9]+]]:vgpr_32 = V_MOV_B32_e32 305419896, implicit $exec
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[V_MOV_B32_e32_]]
+    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    %1:vgpr_32 = COPY killed %0.sub1
+    SI_RETURN_TO_EPILOG %1
+
+...
+
+---
+name:            fold_v_mov_b64_64
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_mov_b64_64
+    ; GCN: [[V_MOV_B64_e32_:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    ; GCN-NEXT: [[V_MOV_B:%[0-9]+]]:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG implicit [[V_MOV_B]]
+    %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+    %1:vreg_64_align2 = COPY killed %0
+    SI_RETURN_TO_EPILOG implicit %1
+
+...
+
+# FIXME:
+# ---
+# name:            fold_v_mov_b64_64_to_unaligned
+# body:             |
+#   bb.0:
+#     %0:vreg_64_align2 = V_MOV_B64_e32 1311768467750121200, implicit $exec
+#     %1:vreg_64 = COPY killed %0
+#     SI_RETURN_TO_EPILOG implicit %1
+# ...
+
+# FIXME:
+# ---
+# name:            fold_v_mov_b64_pseudo_64_to_unaligned
+# body:             |
+#   bb.0:
+#     %0:vreg_64_align2 = V_MOV_B64_PSEUDO 1311768467750121200, implicit $exec
+#     %1:vreg_64 = COPY killed %0
+#     SI_RETURN_TO_EPILOG implicit %1
+# ...
+
+---
+name:            fold_s_brev_b32_simm_virtual_0
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_s_brev_b32_simm_virtual_0
+    ; GCN: [[S_BREV_B32_:%[0-9]+]]:sreg_32 = S_BREV_B32 1
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_BREV_B32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG
+    %0:sreg_32 = S_BREV_B32 1
+    %1:sreg_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG
+
+...
+
+---
+name:            fold_s_brev_b32_simm_virtual_1
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_s_brev_b32_simm_virtual_1
+    ; GCN: [[S_BREV_B32_:%[0-9]+]]:sreg_32 = S_BREV_B32 -64
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_BREV_B32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG
+    %0:sreg_32 = S_BREV_B32 -64
+    %1:sreg_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG
+
+...
+
+---
+name:            fold_v_bfrev_b32_e32_imm
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_bfrev_b32_e32_imm
+    ; GCN: [[V_BFREV_B32_e32_:%[0-9]+]]:vgpr_32 = V_BFREV_B32_e32 1, implicit $exec
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_BFREV_B32_e32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
+    %0:vgpr_32 = V_BFREV_B32_e32 1, implicit $exec
+    %1:vgpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG %1
+
+...
+
+---
+name:            fold_v_bfrev_b32_e64_imm
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_bfrev_b32_e64_imm
+    ; GCN: [[V_BFREV_B32_e64_:%[0-9]+]]:vgpr_32 = V_BFREV_B32_e64 1, implicit $exec
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_BFREV_B32_e64_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
+    %0:vgpr_32 = V_BFREV_B32_e64 1, implicit $exec
+    %1:vgpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG %1
+
+...
+
+---
+name:            fold_s_not_b32_simm_virtual_0
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_s_not_b32_simm_virtual_0
+    ; GCN: [[S_NOT_B32_:%[0-9]+]]:sreg_32 = S_NOT_B32 1, implicit-def $scc
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_NOT_B32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG
+    %0:sreg_32 = S_NOT_B32 1, implicit-def $scc
+    %1:sreg_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG
+
+...
+
+---
+name:            fold_s_not_b32_simm_virtual_1
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_s_not_b32_simm_virtual_1
+    ; GCN: [[S_NOT_B32_:%[0-9]+]]:sreg_32 = S_NOT_B32 -64, implicit-def $scc
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:sreg_32 = COPY killed [[S_NOT_B32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG
+    %0:sreg_32 = S_NOT_B32 -64, implicit-def $scc
+    %1:sreg_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG
+
+...
+
+---
+name:            fold_v_not_b32_e32_imm
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_not_b32_e32_imm
+    ; GCN: [[V_NOT_B32_e32_:%[0-9]+]]:vgpr_32 = V_NOT_B32_e32 1, implicit $exec
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_NOT_B32_e32_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
+    %0:vgpr_32 = V_NOT_B32_e32 1, implicit $exec
+    %1:vgpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG %1
+
+...
+
+---
+name:            fold_v_not_b32_e64_imm
+body:             |
+  bb.0:
+
+    ; GCN-LABEL: name: fold_v_not_b32_e64_imm
+    ; GCN: [[V_NOT_B32_e64_:%[0-9]+]]:vgpr_32 = V_NOT_B32_e64 1, implicit $exec
+    ; GCN-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY killed [[V_NOT_B32_e64_]]
+    ; GCN-NEXT: SI_RETURN_TO_EPILOG [[COPY]]
+    %0:vgpr_32 = V_NOT_B32_e64 1, implicit $exec
+    %1:vgpr_32 = COPY killed %0
+    SI_RETURN_TO_EPILOG %1
+
+...



More information about the llvm-commits mailing list