[llvm] [AA] Improve precision for monotonic atomic load/store operations (PR #158169)
Jin Huang via llvm-commits
llvm-commits at lists.llvm.org
Fri Sep 12 14:28:50 PDT 2025
https://github.com/jinhuang1102 updated https://github.com/llvm/llvm-project/pull/158169
From 05c40eb40c97449ccb0e8b7a39b78ed2612242f1 Mon Sep 17 00:00:00 2001
From: Jin Huang <jingold at google.com>
Date: Thu, 11 Sep 2025 17:40:11 +0000
Subject: [PATCH 1/2] [AA] A conservative fix for atomic instructions.
---
llvm/lib/Analysis/AliasAnalysis.cpp | 15 ++++++++++++---
.../DeadStoreElimination/atomic-todo.ll | 11 +++++++++--
2 files changed, 21 insertions(+), 5 deletions(-)
diff --git a/llvm/lib/Analysis/AliasAnalysis.cpp b/llvm/lib/Analysis/AliasAnalysis.cpp
index 3ec009ca4adde..cb9577aac405a 100644
--- a/llvm/lib/Analysis/AliasAnalysis.cpp
+++ b/llvm/lib/Analysis/AliasAnalysis.cpp
@@ -421,9 +421,18 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Be conservative in the face of atomic.
- if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
+ if (isStrongerThan(L->getOrdering(), AtomicOrdering::Monotonic))
return ModRefInfo::ModRef;
+ // For Monotonic and unordered atomic loads, if the locations are not NoAlias,
+ // we must be conservative and return ModRef to prevent unsafe reordering of
+ // accesses to the same memory.
+ if (L->isAtomic()){
+ if (Loc.Ptr &&
+ alias(MemoryLocation::get(L), Loc, AAQI, L) != AliasResult::NoAlias)
+ return ModRefInfo::ModRef;
+ }
+
// If the load address doesn't alias the given address, it doesn't read
// or write the specified memory.
if (Loc.Ptr) {
@@ -439,7 +448,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
const MemoryLocation &Loc,
AAQueryInfo &AAQI) {
// Be conservative in the face of atomic.
- if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
+ if (isStrongerThan(S->getOrdering(), AtomicOrdering::Monotonic))
return ModRefInfo::ModRef;
if (Loc.Ptr) {
@@ -458,7 +467,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
}
// Otherwise, a store just writes.
- return ModRefInfo::Mod;
+ return ModRefInfo::ModRef;
}
ModRefInfo AAResults::getModRefInfo(const FenceInst *S,
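For illustration, a minimal IR sketch (hypothetical names, not part of the patch) of the two load-side outcomes this hunk is aiming at: a monotonic load against a provably NoAlias location drops from the old blanket ModRef to NoModRef, while a possibly-aliasing monotonic load still reports ModRef and keeps acting as a barrier.

```llvm
@x = global i32 0, align 4
@other = global i32 0, align 4

; getModRefInfo(load, MemoryLocation of @x): @other and @x are NoAlias, so
; with this change the query returns NoModRef (previously it returned ModRef
; simply because monotonic is stronger than unordered).
define i32 @sketch_noalias_monotonic_load() {
  %v1 = load atomic i32, ptr @other monotonic, align 4
  ret i32 %v1
}

; getModRefInfo(load, MemoryLocation of @x): %p may alias @x, so the query
; still returns ModRef and stores to @x cannot be reordered or removed
; across the load.
define i32 @sketch_mayalias_monotonic_load(ptr %p) {
  %v2 = load atomic i32, ptr %p monotonic, align 4
  ret i32 %v2
}
```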
diff --git a/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll b/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
index 1c160442f8579..16f8a52b00eab 100644
--- a/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
+++ b/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
@@ -1,4 +1,3 @@
-; XFAIL: *
; RUN: opt -passes=dse -S < %s | FileCheck %s
target datalayout = "e-p:64:64:64-i1:8:8-i8:8:8-i16:16:16-i32:32:32-i64:64:64-f32:32:32-f64:64:64-v64:64:64-v128:128:128-a0:0:64-s0:64:64-f80:128:128-n8:16:32:64"
@@ -11,7 +10,7 @@ target triple = "x86_64-apple-macosx10.7.0"
@x = common global i32 0, align 4
@y = common global i32 0, align 4
-; DSE across monotonic load (allowed as long as the eliminated store isUnordered)
+; DSE across monotonic load (allowed if the monotonic load's address is NoAlias)
define i32 @test9() {
; CHECK-LABEL: test9
; CHECK-NOT: store i32 0
@@ -21,3 +20,11 @@ define i32 @test9() {
store i32 1, ptr @x
ret i32 %x
}
+
+; DSE across monotonic load (blocked if the atomic load's address isn't NoAlias)
+define i32 @test9a() {
+ store i32 0, ptr @x
+ %x = load atomic i32, ptr @ptr monotonic, align 4
+ store i32 1, ptr @x
+ ret i32 %x
+}
From 6526ddf99c07e7a91929ee116508dd8c7d0941b4 Mon Sep 17 00:00:00 2001
From: Jin Huang <jingold at google.com>
Date: Fri, 12 Sep 2025 00:06:37 +0000
Subject: [PATCH 2/2] [AA] Improve precision for monotonic atomic load/store
operations
---
llvm/lib/Analysis/AliasAnalysis.cpp | 24 ++++++++++++-------
.../DeadStoreElimination/atomic-todo.ll | 10 ++++++--
llvm/unittests/Analysis/AliasAnalysisTest.cpp | 4 ++--
3 files changed, 25 insertions(+), 13 deletions(-)
diff --git a/llvm/lib/Analysis/AliasAnalysis.cpp b/llvm/lib/Analysis/AliasAnalysis.cpp
index cb9577aac405a..7c07129daea2d 100644
--- a/llvm/lib/Analysis/AliasAnalysis.cpp
+++ b/llvm/lib/Analysis/AliasAnalysis.cpp
@@ -424,15 +424,6 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
if (isStrongerThan(L->getOrdering(), AtomicOrdering::Monotonic))
return ModRefInfo::ModRef;
- // For Monotonic and unordered atomic loads, if the locations are not NoAlias,
- // we must be conservative and return ModRef to prevent unsafe reordering of
- // accesses to the same memory.
- if (L->isAtomic()){
- if (Loc.Ptr &&
- alias(MemoryLocation::get(L), Loc, AAQI, L) != AliasResult::NoAlias)
- return ModRefInfo::ModRef;
- }
-
// If the load address doesn't alias the given address, it doesn't read
// or write the specified memory.
if (Loc.Ptr) {
@@ -440,6 +431,14 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
if (AR == AliasResult::NoAlias)
return ModRefInfo::NoModRef;
}
+
+ // At this point, the load's ordering is at most `Monotonic` (i.e., Monotonic,
+ // Unordered, or non-atomic), and it aliases with `Loc`. The condition
+ // `isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered)` is true only
+ // for `Monotonic` loads.
+ if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
+ return ModRefInfo::ModRef;
+
// Otherwise, a load just reads.
return ModRefInfo::Ref;
}
@@ -466,6 +465,13 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
return ModRefInfo::NoModRef;
}
+ // At this point, the store's ordering is at most `Monotonic` (i.e.,
+ // Monotonic, Unordered, or non-atomic), and it aliases with `Loc`. The
+ // condition `isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered)` is
+ // true only for `Monotonic` stores.
+ if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
+ return ModRefInfo::ModRef;
+
// Otherwise, a store just writes.
return ModRefInfo::ModRef;
}
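As a companion to the load-side tests below, a minimal IR sketch (hypothetical, not part of the patch) of the store-side effect of the same relaxation: a monotonic atomic store to a provably NoAlias location no longer reports ModRef for @x, so alias analysis by itself no longer forces the first store to @x to be kept; whether DSE actually removes it also depends on DSE's own atomicity checks.

```llvm
@x = global i32 0, align 4
@flag = global i32 0, align 4

define void @sketch_noalias_monotonic_store() {
  store i32 0, ptr @x
  ; getModRefInfo(this store, MemoryLocation of @x): @flag and @x are
  ; NoAlias, so the query now returns NoModRef instead of the old blanket
  ; ModRef for any ordering stronger than unordered.
  store atomic i32 1, ptr @flag monotonic, align 4
  store i32 1, ptr @x
  ret void
}
```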
diff --git a/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll b/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
index 16f8a52b00eab..85a4a7fdd79e3 100644
--- a/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
+++ b/llvm/test/Transforms/DeadStoreElimination/atomic-todo.ll
@@ -22,9 +22,15 @@ define i32 @test9() {
}
; DSE across monotonic load (blocked if the atomic load's address isn't NoAlias)
-define i32 @test9a() {
+define i32 @test9a(ptr %ptr) {
+; CHECK-LABEL: @test9a(
+; CHECK-NEXT: store i32 0, ptr @x, align 4
+; CHECK-NEXT: [[X:%.*]] = load atomic i32, ptr [[PTR:%.*]] monotonic, align 4
+; CHECK-NEXT: store i32 1, ptr @x, align 4
+; CHECK-NEXT: ret i32 [[X]]
+;
store i32 0, ptr @x
- %x = load atomic i32, ptr @ptr monotonic, align 4
+ %x = load atomic i32, ptr %ptr monotonic, align 4
store i32 1, ptr @x
ret i32 %x
}
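For contrast, a minimal sketch (hypothetical, not from the patch) of what the change does not relax: orderings stronger than monotonic still hit the early `return ModRefInfo::ModRef` before any alias query, so an acquire load conservatively keeps the earlier store to @x alive even though @y and @x are NoAlias.

```llvm
@x = global i32 0, align 4
@y = global i32 0, align 4

define i32 @sketch_acquire_still_conservative() {
  store i32 0, ptr @x
  ; Acquire is stronger than monotonic, so getModRefInfo reports ModRef for
  ; the location of @x without consulting alias analysis, and the store
  ; above is conservatively treated as live.
  %v = load atomic i32, ptr @y acquire, align 4
  store i32 1, ptr @x
  ret i32 %v
}
```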
diff --git a/llvm/unittests/Analysis/AliasAnalysisTest.cpp b/llvm/unittests/Analysis/AliasAnalysisTest.cpp
index 06066b1b92c51..09dc121bed3ae 100644
--- a/llvm/unittests/Analysis/AliasAnalysisTest.cpp
+++ b/llvm/unittests/Analysis/AliasAnalysisTest.cpp
@@ -200,8 +200,8 @@ TEST_F(AliasAnalysisTest, getModRefInfo) {
auto &AA = getAAResults(*F);
// Check basic results
- EXPECT_EQ(AA.getModRefInfo(Store1, MemoryLocation()), ModRefInfo::Mod);
- EXPECT_EQ(AA.getModRefInfo(Store1, std::nullopt), ModRefInfo::Mod);
+ EXPECT_EQ(AA.getModRefInfo(Store1, MemoryLocation()), ModRefInfo::ModRef);
+ EXPECT_EQ(AA.getModRefInfo(Store1, std::nullopt), ModRefInfo::ModRef);
EXPECT_EQ(AA.getModRefInfo(Load1, MemoryLocation()), ModRefInfo::Ref);
EXPECT_EQ(AA.getModRefInfo(Load1, std::nullopt), ModRefInfo::Ref);
EXPECT_EQ(AA.getModRefInfo(Add1, MemoryLocation()), ModRefInfo::NoModRef);