[PATCH] D31726: AliasAnalysis: Be less conservative about volatile than atomic.
Daniel Berlin via Phabricator via llvm-commits
llvm-commits at lists.llvm.org
Thu Apr 6 11:59:19 PDT 2017
dberlin updated this revision to Diff 94413.
dberlin added a comment.
- Update for comments
https://reviews.llvm.org/D31726
Files:
lib/Analysis/AliasAnalysis.cpp
lib/Transforms/Utils/MemorySSA.cpp
test/Transforms/NewGVN/volatile-nonvolatile.ll
Index: test/Transforms/NewGVN/volatile-nonvolatile.ll
===================================================================
--- test/Transforms/NewGVN/volatile-nonvolatile.ll
+++ test/Transforms/NewGVN/volatile-nonvolatile.ll
@@ -1,4 +1,3 @@
-; XFAIL: *
; RUN: opt -tbaa -newgvn -S < %s | FileCheck %s
%struct.t = type { i32* }
Index: lib/Transforms/Utils/MemorySSA.cpp
===================================================================
--- lib/Transforms/Utils/MemorySSA.cpp
+++ lib/Transforms/Utils/MemorySSA.cpp
@@ -849,7 +849,8 @@
} // anonymous namespace
namespace llvm {
-/// \brief A MemorySSAWalker that does AA walks to disambiguate accesses. It no longer does caching on its own,
+/// \brief A MemorySSAWalker that does AA walks to disambiguate accesses. It no
+/// longer does caching on its own,
/// but the name has been retained for the moment.
class MemorySSA::CachingWalker final : public MemorySSAWalker {
ClobberWalker Walker;
@@ -1456,6 +1457,19 @@
return NewAccess;
}
+// Return true if the instruction has ordering constraints.
+// Note specifically that this only considers stores and loads
+// because others are still considered ModRef by getModRefInfo.
+static inline bool isOrdered(const Instruction *I) {
+ if (auto *SI = dyn_cast<StoreInst>(I)) {
+ if (!SI->isUnordered())
+ return true;
+ } else if (auto *LI = dyn_cast<LoadInst>(I)) {
+ if (!LI->isUnordered())
+ return true;
+ }
+ return false;
+}
/// \brief Helper function to create new memory accesses
MemoryUseOrDef *MemorySSA::createNewAccess(Instruction *I) {
// The assume intrinsic has a control dependency which we model by claiming
@@ -1468,7 +1482,15 @@
// Find out what effect this instruction has on memory.
ModRefInfo ModRef = AA->getModRefInfo(I);
- bool Def = bool(ModRef & MRI_Mod);
+ // The isOrdered check is used to ensure that volatiles end up as defs
+ // (atomics end up as ModRef right now anyway). Until we separate the
+ // ordering chain from the memory chain, this enables people to see at least
+ // some relative ordering to volatiles. Note that getClobberingMemoryAccess
+ // will still give an answer that bypasses other volatile loads. TODO:
+ // Separate memory aliasing and ordering into two different chains so that we
+ // can precisely represent both "what memory will this read/write/is clobbered
+ // by" and "what instructions can I move this past".
+ bool Def = bool(ModRef & MRI_Mod) || isOrdered(I);
bool Use = bool(ModRef & MRI_Ref);
// It's possible for an instruction to not modify memory at all. During
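
For readers following along, a rough sketch (hypothetical IR, not taken from this patch or its test) of what the isOrdered check buys us in MemorySSA: both volatile loads below are created as MemoryDefs, so they keep a relative order in the memory chain, even though getClobberingMemoryAccess may still walk past the first one when answering pure aliasing queries. The MemorySSA numbering in the comments is approximate and only for illustration.

define i32 @two_volatile_loads(i32* %p) {
entry:
; 1 = MemoryDef(liveOnEntry)   ; the volatile load becomes a def, not a use
  %a = load volatile i32, i32* %p
; 2 = MemoryDef(1)             ; ordered after the first volatile load
  %b = load volatile i32, i32* %p
  %sum = add i32 %a, %b
  ret i32 %sum
}
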
Index: lib/Analysis/AliasAnalysis.cpp
===================================================================
--- lib/Analysis/AliasAnalysis.cpp
+++ lib/Analysis/AliasAnalysis.cpp
@@ -332,8 +332,8 @@
ModRefInfo AAResults::getModRefInfo(const LoadInst *L,
const MemoryLocation &Loc) {
- // Be conservative in the face of volatile/atomic.
- if (!L->isUnordered())
+ // Be conservative in the face of atomic.
+ if (isStrongerThan(L->getOrdering(), AtomicOrdering::Unordered))
return MRI_ModRef;
// If the load address doesn't alias the given address, it doesn't read
@@ -347,8 +347,8 @@
ModRefInfo AAResults::getModRefInfo(const StoreInst *S,
const MemoryLocation &Loc) {
- // Be conservative in the face of volatile/atomic.
- if (!S->isUnordered())
+ // Be conservative in the face of atomic.
+ if (isStrongerThan(S->getOrdering(), AtomicOrdering::Unordered))
return MRI_ModRef;
if (Loc.Ptr) {
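
And a similarly hypothetical sketch (not the committed test) of what the relaxed getModRefInfo allows: volatile but non-atomic accesses now fall through to the usual alias checks, so the volatile store below is still neither removed nor reordered, but BasicAA can report that it does not touch the noalias pointer %q, and the second load from %q can be value-forwarded by passes such as -newgvn.

define i32 @volatile_store_unrelated(i32* %p, i32* noalias %q) {
entry:
  %a = load i32, i32* %q
  ; Volatile, but non-atomic: no longer forced to MRI_ModRef against %q.
  store volatile i32 0, i32* %p
  ; With %p and %q known not to alias, this load can reuse %a.
  %b = load i32, i32* %q
  %r = add i32 %a, %b
  ret i32 %r
}
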
(Full patch attached as D31726.94413.patch, 3602 bytes: <http://lists.llvm.org/pipermail/llvm-commits/attachments/20170406/8cdaf29c/attachment.bin>)