[llvm] [AA] A conservative fix for atomic store instruction. (PR #155032)

Jin Huang via llvm-commits llvm-commits at lists.llvm.org
Wed Sep 3 22:33:07 PDT 2025


https://github.com/jinhuang1102 updated https://github.com/llvm/llvm-project/pull/155032

From 9591a91550031503d4024e293de5bb527341c8f9 Mon Sep 17 00:00:00 2001
From: Jin Huang <jingold at google.com>
Date: Fri, 22 Aug 2025 21:27:27 +0000
Subject: [PATCH] [AA] A conservative fix for atomic store instructions. The
 current check treats every atomic store as a modifying instruction without
 performing any memory check. This PR raises the bail-out threshold from
 Unordered to Monotonic.
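
As a concrete illustration (hypothetical IR, not part of the patch): with a
monotonic store next to an access through a noalias pointer, the old check
returned ModRef before any alias query ran, while the new check lets the
ordinary alias query prove NoModRef.

  define void @example(i8* noalias %p, i8* noalias %q) {
    store atomic i8 0, i8* %p monotonic, align 1
    ; Old: getModRefInfo(store, q-loc) == ModRef, no alias check performed.
    ; New: the alias query runs; %p and %q are noalias, so NoModRef.
    %v = load i8, i8* %q
    ret void
  }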

---
 llvm/lib/Analysis/AliasAnalysis.cpp           | 13 +++-
 llvm/unittests/Analysis/AliasAnalysisTest.cpp | 73 +++++++++++++++++++
 2 files changed, 84 insertions(+), 2 deletions(-)

diff --git a/llvm/lib/Analysis/AliasAnalysis.cpp b/llvm/lib/Analysis/AliasAnalysis.cpp
index 3ec009ca4adde..ed46079ca49f4 100644
--- a/llvm/lib/Analysis/AliasAnalysis.cpp
+++ b/llvm/lib/Analysis/AliasAnalysis.cpp
@@ -421,9 +421,18 @@ ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
                                     const MemoryLocation &Loc,
                                     AAQueryInfo &AAQI) {
   // Be conservative in the face of atomic.
-  if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
+  if (isStrongerThan(L->getOrdering(), AtomicOrdering::Monotonic))
     return ModRefInfo::ModRef;
 
+  // For Monotonic and unordered loads, we only need to be more conservative
+  // when the locations are MustAlias to prevent unsafe reordering of accesses
+  // to the same memory. For MayAlias, we can treat it as a normal read.
+  if (L->isAtomic()) {
+    if (Loc.Ptr &&
+        alias(MemoryLocation::get(L), Loc, AAQI, L) == AliasResult::MustAlias)
+      return ModRefInfo::ModRef;
+  }
+
   // If the load address doesn't alias the given address, it doesn't read
   // or write the specified memory.
   if (Loc.Ptr) {
@@ -439,7 +448,7 @@ ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
                                     const MemoryLocation &Loc,
                                     AAQueryInfo &AAQI) {
   // Be conservative in the face of atomic.
-  if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
+  if (isStrongerThan(S->getOrdering(), AtomicOrdering::Monotonic))
     return ModRefInfo::ModRef;
 
   if (Loc.Ptr) {
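
For context (not part of the patch), both hunks above rely on the ordering
lattice from llvm/Support/AtomicOrdering.h:

  NotAtomic < Unordered < Monotonic < Acquire/Release
            < AcquireRelease < SequentiallyConsistent

A minimal sketch of how moving the threshold changes the early bail-out,
assuming that header's isStrongerThan():

  #include "llvm/Support/AtomicOrdering.h"
  using namespace llvm;

  static bool bailsOutBeforeAliasQuery(AtomicOrdering O) {
    // Old: isStrongerThan(O, AtomicOrdering::Unordered) -- Monotonic and
    // everything above it returned ModRef with no alias check at all.
    // New: only orderings stronger than Monotonic (Acquire, Release,
    // AcquireRelease, SequentiallyConsistent) bail out early.
    return isStrongerThan(O, AtomicOrdering::Monotonic);
  }
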
diff --git a/llvm/unittests/Analysis/AliasAnalysisTest.cpp b/llvm/unittests/Analysis/AliasAnalysisTest.cpp
index 06066b1b92c51..780fc966ad4d2 100644
--- a/llvm/unittests/Analysis/AliasAnalysisTest.cpp
+++ b/llvm/unittests/Analysis/AliasAnalysisTest.cpp
@@ -425,4 +425,77 @@ TEST_F(AAPassInfraTest, injectExternalAA) {
   EXPECT_TRUE(IsCustomAAQueried);
 }
 
+// Test that a monotonic atomic load is treated as ModRef ONLY against a
+// MustAlias location, and as Ref against a MayAlias location.
+TEST_F(AliasAnalysisTest, MonotonicAtomicLoadIsModRefOnlyForMustAlias) {
+  LLVMContext C;
+  SMDiagnostic Err;
+  std::unique_ptr<Module> M = parseAssemblyString(R"(
+    define void @atomic_load_modref_test(i8* %p1, i8* %p2) {
+    entry:
+      %val = load atomic i8, i8* %p1 monotonic, align 1
+      ret void
+    }
+  )", Err, C);
+
+  ASSERT_TRUE(M);
+
+  Function *F = M->getFunction("atomic_load_modref_test");
+  const Instruction *AtomicLoad = &F->getEntryBlock().front();
+  const Value *Ptr1 = F->getArg(0);
+  const Value *Ptr2 = F->getArg(1);
+
+  MemoryLocation Ptr1Loc = MemoryLocation(Ptr1, LocationSize::precise(1));
+  MemoryLocation Ptr2Loc = MemoryLocation(Ptr2, LocationSize::precise(1));
+
+  auto &AA = getAAResults(*F);
+
+  // 1. MustAlias Case: The atomic load on %p1 against a location on %p1.
+  //    This should trigger the new logic and return ModRef.
+  EXPECT_EQ(ModRefInfo::ModRef, AA.getModRefInfo(AtomicLoad, Ptr1Loc));
+
+  // 2. MayAlias Case: The atomic load on %p1 against a location on %p2.
+  //    This should NOT trigger the new logic and should fall back to returning
+  //    Ref.
+  EXPECT_EQ(ModRefInfo::Ref, AA.getModRefInfo(AtomicLoad, Ptr2Loc));
+}
+
+// Test that a monotonic atomic store is no longer considered "strong" and
+// falls through to the alias checking logic, while stronger orderings do not.
+TEST_F(AliasAnalysisTest, MonotonicAtomicStoreAliasBehavior) {
+  LLVMContext C;
+  SMDiagnostic Err;
+  std::unique_ptr<Module> M = parseAssemblyString(R"(
+    define void @atomic_store_test(i8* noalias %p1, i8* noalias %p2) {
+    entry:
+      store atomic i8 0, i8* %p1 monotonic, align 1
+      store atomic i8 0, i8* %p1 release, align 1
+      ret void
+    }
+  )", Err, C);
+
+  ASSERT_TRUE(M);
+
+  Function *F = M->getFunction("atomic_store_test");
+  auto It = F->getEntryBlock().begin();
+  const Instruction *MonotonicStore = &*It++;
+  const Instruction *ReleaseStore = &*It;
+
+  const Value *Ptr2 = F->getArg(1);
+  MemoryLocation Ptr2Loc = MemoryLocation(Ptr2, LocationSize::precise(1));
+
+  auto &AA = getAAResults(*F);
+
+  // 1. Test the monotonic store.
+  // With this change, the monotonic store is no longer caught by the
+  // isStrongerThan() check and falls through to the alias query. Since
+  // %p1 and %p2 are noalias, the query returns NoAlias, yielding NoModRef.
+  EXPECT_EQ(ModRefInfo::NoModRef, AA.getModRefInfo(MonotonicStore, Ptr2Loc));
+
+  // 2. Test the release (stronger) store.
+  // The release store is still caught by the isStrongerThan() check and
+  // returns ModRef without performing an alias check.
+  EXPECT_EQ(ModRefInfo::ModRef, AA.getModRefInfo(ReleaseStore, Ptr2Loc));
+}
+
 } // end anonymous namespace
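
Assuming a standard CMake build tree, the new coverage can be exercised with
something like:

  ninja AnalysisTests && ./unittests/Analysis/AnalysisTests \
    --gtest_filter='*Atomic*'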


