[llvm] [InitUndef] Also handle inline asm (PR #108951)

Nikita Popov via llvm-commits <llvm-commits@lists.llvm.org>
Wed Sep 18 02:07:44 PDT 2024


https://github.com/nikic updated https://github.com/llvm/llvm-project/pull/108951

From 16014feaef07402363d935c133e55f483f3b55c0 Mon Sep 17 00:00:00 2001
From: Nikita Popov <npopov@redhat.com>
Date: Tue, 17 Sep 2024 11:57:28 +0200
Subject: [PATCH 1/2] [InitUndef] Also handle inline asm

InitUndef should also handle early-clobber / undef conflicts in
inline asm operands. Do this by iterating over all_defs() instead
of defs().

Prior to this change, the newly added test produced an "unpredictable
STXP instruction, status is also a source" error.

Fixes https://github.com/llvm/llvm-project/issues/106380.
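
Background on why defs() misses these operands: MachineInstr::defs()
only walks the leading explicit-def window of the operand list, and an
INLINEASM instruction begins with the asm string and flag immediates,
so that window is empty and early-clobber outputs buried among the
variadic operands are never seen; all_defs() instead filters every
operand for register defs. Below is a minimal, self-contained model of
that difference (plain C++ with toy Operand/Instr types, not the real
llvm::MachineInstr API):

// Toy model of the defs() vs. all_defs() distinction. For an
// INLINEASM-like instruction the leading operands are not register
// defs, so the explicit-def window is empty and the early-clobber
// output is only found by filtering all operands.
#include <iostream>
#include <vector>

struct Operand {
  bool IsReg;
  bool IsDef;
  bool IsEarlyClobber;
};

struct Instr {
  unsigned NumExplicitDefs; // 0 for INLINEASM: operand 0 is the asm string
  std::vector<Operand> Ops;
};

// Analogue of the old check: scan only the leading explicit-def window.
static bool isEarlyClobberViaDefs(const Instr &MI) {
  for (unsigned I = 0; I < MI.NumExplicitDefs; ++I)
    if (MI.Ops[I].IsReg && MI.Ops[I].IsEarlyClobber)
      return true;
  return false;
}

// Analogue of the new check: filter every operand for register defs.
static bool isEarlyClobberViaAllDefs(const Instr &MI) {
  for (const Operand &MO : MI.Ops)
    if (MO.IsReg && MO.IsDef && MO.IsEarlyClobber)
      return true;
  return false;
}

int main() {
  // INLINEASM-like layout: asm string, flag imm, =&r output, r input.
  Instr MI;
  MI.NumExplicitDefs = 0;
  MI.Ops = {{false, false, false},  // asm string
            {false, false, false},  // flag immediate
            {true, true, true},     // =&r output: early-clobber reg def
            {true, false, false}};  // r input
  std::cout << isEarlyClobberViaDefs(MI) << '\n';    // 0: def missed
  std::cout << isEarlyClobberViaAllDefs(MI) << '\n'; // 1: def found
}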
---
 llvm/lib/CodeGen/InitUndef.cpp               |  2 +-
 llvm/test/CodeGen/AArch64/arm64-ldxr-stxr.ll | 13 +++++++++++++
 2 files changed, 14 insertions(+), 1 deletion(-)

diff --git a/llvm/lib/CodeGen/InitUndef.cpp b/llvm/lib/CodeGen/InitUndef.cpp
index 911e8bb7a4d9ef..d4ac131a32a959 100644
--- a/llvm/lib/CodeGen/InitUndef.cpp
+++ b/llvm/lib/CodeGen/InitUndef.cpp
@@ -98,7 +98,7 @@ INITIALIZE_PASS(InitUndef, DEBUG_TYPE, INIT_UNDEF_NAME, false, false)
 char &llvm::InitUndefID = InitUndef::ID;
 
 static bool isEarlyClobberMI(MachineInstr &MI) {
-  return llvm::any_of(MI.defs(), [](const MachineOperand &DefMO) {
+  return llvm::any_of(MI.all_defs(), [](const MachineOperand &DefMO) {
     return DefMO.isReg() && DefMO.isEarlyClobber();
   });
 }
diff --git a/llvm/test/CodeGen/AArch64/arm64-ldxr-stxr.ll b/llvm/test/CodeGen/AArch64/arm64-ldxr-stxr.ll
index 4fb0c2775a7a7a..b498611242d469 100644
--- a/llvm/test/CodeGen/AArch64/arm64-ldxr-stxr.ll
+++ b/llvm/test/CodeGen/AArch64/arm64-ldxr-stxr.ll
@@ -364,6 +364,19 @@ define dso_local i32 @test_stxp_undef(ptr %p, i64 %x) nounwind {
   ret i32 %res
 }
 
+; Same as previous test, but using inline asm.
+define dso_local i32 @test_stxp_undef_inline_asm(ptr %p, i64 %x) nounwind {
+; CHECK-LABEL: test_stxp_undef_inline_asm:
+; CHECK:       // %bb.0:
+; CHECK-NEXT:    //APP
+; CHECK-NEXT:    stxp w8, x9, x1, [x0]
+; CHECK-NEXT:    //NO_APP
+; CHECK-NEXT:    mov w0, w8
+; CHECK-NEXT:    ret
+  %res = call i32 asm sideeffect "stxp ${0:w}, ${2}, ${3}, [${1}]", "=&r,r,r,r,~{memory}"(ptr %p, i64 undef, i64 %x)
+  ret i32 %res
+}
+
 declare i32 @llvm.aarch64.stlxr.p0(i64, ptr) nounwind
 ;; NOTE: These prefixes are unused and the list is autogenerated. Do not add tests below this line:
 ; FALLBACK: {{.*}}

From 05a73055f0f69e52bc7b3812063e9b474bb6650b Mon Sep 17 00:00:00 2001
From: Nikita Popov <npopov@redhat.com>
Date: Wed, 18 Sep 2024 11:07:24 +0200
Subject: [PATCH 2/2] Add ARM inline asm test

---
 llvm/test/CodeGen/Thumb2/mve-intrinsics/vcaddq.ll | 13 +++++++++++++
 1 file changed, 13 insertions(+)

diff --git a/llvm/test/CodeGen/Thumb2/mve-intrinsics/vcaddq.ll b/llvm/test/CodeGen/Thumb2/mve-intrinsics/vcaddq.ll
index 02234c63725360..322086829953dd 100644
--- a/llvm/test/CodeGen/Thumb2/mve-intrinsics/vcaddq.ll
+++ b/llvm/test/CodeGen/Thumb2/mve-intrinsics/vcaddq.ll
@@ -710,6 +710,19 @@ entry:
   ret <4 x i32> %0
 }
 
+define arm_aapcs_vfpcc <4 x i32> @test_vhcaddq_rot270_s32_undef_inline_asm() {
+; CHECK-LABEL: test_vhcaddq_rot270_s32_undef_inline_asm:
+; CHECK:       @ %bb.0: @ %entry
+; CHECK-NEXT: @APP
+; CHECK-NEXT: vhcadd.s32 q{{[0-9]+}}, q{{[0-9]+}}, q{{[0-9]+}}, #270
+; CHECK-NOT:  vhcadd.s32 q[[REG:[0-9]+]], q{{[0-9]+}}, q[[REG]], #270
+; CHECK-NEXT: @NO_APP
+; CHECK-NEXT:    bx lr
+entry:
+  %0 = call <4 x i32> asm sideeffect "vhcadd.s32 ${0}, ${1}, ${2}, #270", "=&w,w,w,~{memory}"(<4 x i32> undef, <4 x i32> undef)
+  ret <4 x i32> %0
+}
+
 define arm_aapcs_vfpcc <16 x i8> @test_vhcaddq_rot90_x_s8(<16 x i8> %a, <16 x i8> %b, i16 zeroext %p) {
 ; CHECK-LABEL: test_vhcaddq_rot90_x_s8:
 ; CHECK:       @ %bb.0: @ %entry
