[llvm] r340951 - [MemorySSA] Add expensive check for validating clobber accesses.

Alina Sbirlea via llvm-commits llvm-commits at lists.llvm.org
Wed Aug 29 11:26:04 PDT 2018


Author: asbirlea
Date: Wed Aug 29 11:26:04 2018
New Revision: 340951

URL: http://llvm.org/viewvc/llvm-project?rev=340951&view=rev
Log:
[MemorySSA] Add expensive check for validating clobber accesses.

Summary: Add validation of clobber accesses as an expensive check.

Reviewers: george.burgess.iv

Subscribers: sanjoy, jlebar, Prazek, llvm-commits

Differential Revision: https://reviews.llvm.org/D51327

Modified:
    llvm/trunk/include/llvm/Analysis/MemorySSA.h
    llvm/trunk/lib/Analysis/MemorySSA.cpp

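The new verification only fires in builds configured with expensive checks: the implementation below guards it with !defined(NDEBUG) && defined(EXPENSIVE_CHECKS), i.e. builds configured with -DLLVM_ENABLE_EXPENSIVE_CHECKS=ON. As a rough illustration of how the check is reached, here is a minimal sketch of a hypothetical new-pass-manager pass that requests MemorySSA and calls verifyMemorySSA(); the pass itself is an assumption for illustration and is not part of this commit.

  // Hypothetical example, not part of this commit: a function pass that
  // requests MemorySSA and runs the full verification. With expensive
  // checks enabled, verifyMemorySSA() now also re-validates the cached
  // clobber of every optimized access via verifyClobberSanity().
  #include "llvm/Analysis/MemorySSA.h"
  #include "llvm/IR/PassManager.h"

  using namespace llvm;

  struct VerifyMSSAExamplePass : PassInfoMixin<VerifyMSSAExamplePass> {
    PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM) {
      MemorySSA &MSSA = AM.getResult<MemorySSAAnalysis>(F).getMSSA();
      MSSA.verifyMemorySSA(); // asserts on any inconsistency it finds
      return PreservedAnalyses::all();
    }
  };
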
Modified: llvm/trunk/include/llvm/Analysis/MemorySSA.h
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/include/llvm/Analysis/MemorySSA.h?rev=340951&r1=340950&r2=340951&view=diff
==============================================================================
--- llvm/trunk/include/llvm/Analysis/MemorySSA.h (original)
+++ llvm/trunk/include/llvm/Analysis/MemorySSA.h Wed Aug 29 11:26:04 2018
@@ -774,6 +774,9 @@ public:
   /// all uses, uses appear in the right places).  This is used by unit tests.
   void verifyMemorySSA() const;
 
+  /// Check clobber sanity for an access.
+  void checkClobberSanityAccess(const MemoryAccess *MA) const;
+
   /// Used in various insertion functions to specify whether we are talking
   /// about the beginning or end of a block.
   enum InsertionPlace { Beginning, End };
@@ -788,6 +791,7 @@ protected:
   void verifyDomination(Function &F) const;
   void verifyOrdering(Function &F) const;
   void verifyDominationNumbers(const Function &F) const;
+  void verifyClobberSanity(const Function &F) const;
 
   // This is used by the use optimizer and updater.
   AccessList *getWritableBlockAccesses(const BasicBlock *BB) const {

Modified: llvm/trunk/lib/Analysis/MemorySSA.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Analysis/MemorySSA.cpp?rev=340951&r1=340950&r2=340951&view=diff
==============================================================================
--- llvm/trunk/lib/Analysis/MemorySSA.cpp (original)
+++ llvm/trunk/lib/Analysis/MemorySSA.cpp Wed Aug 29 11:26:04 2018
@@ -253,7 +253,7 @@ struct ClobberAlias {
 
 // Return a pair of {IsClobber (bool), AR (AliasResult)}. It relies on AR being
 // ignored if IsClobber = false.
-static ClobberAlias instructionClobbersQuery(MemoryDef *MD,
+static ClobberAlias instructionClobbersQuery(const MemoryDef *MD,
                                              const MemoryLocation &UseLoc,
                                              const Instruction *UseInst,
                                              AliasAnalysis &AA) {
@@ -377,11 +377,11 @@ static bool isUseTriviallyOptimizableToL
 /// \param Start     The MemoryAccess that we want to walk from.
 /// \param ClobberAt A clobber for Start.
 /// \param StartLoc  The MemoryLocation for Start.
-/// \param MSSA      The MemorySSA isntance that Start and ClobberAt belong to.
+/// \param MSSA      The MemorySSA instance that Start and ClobberAt belong to.
 /// \param Query     The UpwardsMemoryQuery we used for our search.
 /// \param AA        The AliasAnalysis we used for our search.
-static void LLVM_ATTRIBUTE_UNUSED
-checkClobberSanity(MemoryAccess *Start, MemoryAccess *ClobberAt,
+static void
+checkClobberSanity(const MemoryAccess *Start, MemoryAccess *ClobberAt,
                    const MemoryLocation &StartLoc, const MemorySSA &MSSA,
                    const UpwardsMemoryQuery &Query, AliasAnalysis &AA) {
   assert(MSSA.dominates(ClobberAt, Start) && "Clobber doesn't dominate start?");
@@ -393,21 +393,21 @@ checkClobberSanity(MemoryAccess *Start,
   }
 
   bool FoundClobber = false;
-  DenseSet<MemoryAccessPair> VisitedPhis;
-  SmallVector<MemoryAccessPair, 8> Worklist;
+  DenseSet<ConstMemoryAccessPair> VisitedPhis;
+  SmallVector<ConstMemoryAccessPair, 8> Worklist;
   Worklist.emplace_back(Start, StartLoc);
   // Walk all paths from Start to ClobberAt, while looking for clobbers. If one
   // is found, complain.
   while (!Worklist.empty()) {
-    MemoryAccessPair MAP = Worklist.pop_back_val();
+    auto MAP = Worklist.pop_back_val();
     // All we care about is that nothing from Start to ClobberAt clobbers Start.
     // We learn nothing from revisiting nodes.
     if (!VisitedPhis.insert(MAP).second)
       continue;
 
-    for (MemoryAccess *MA : def_chain(MAP.first)) {
+    for (const auto *MA : def_chain(MAP.first)) {
       if (MA == ClobberAt) {
-        if (auto *MD = dyn_cast<MemoryDef>(MA)) {
+        if (const auto *MD = dyn_cast<MemoryDef>(MA)) {
           // instructionClobbersQuery isn't essentially free, so don't use `|=`,
           // since it won't let us short-circuit.
           //
@@ -429,7 +429,11 @@ checkClobberSanity(MemoryAccess *Start,
       // We should never hit liveOnEntry, unless it's the clobber.
       assert(!MSSA.isLiveOnEntryDef(MA) && "Hit liveOnEntry before clobber?");
 
-      if (auto *MD = dyn_cast<MemoryDef>(MA)) {
+      // If Start is a Def, skip self.
+      if (MA == Start)
+        continue;
+
+      if (const auto *MD = dyn_cast<MemoryDef>(MA)) {
         (void)MD;
         assert(!instructionClobbersQuery(MD, MAP.second, Query.Inst, AA)
                     .IsClobber &&
@@ -438,7 +442,9 @@ checkClobberSanity(MemoryAccess *Start,
       }
 
       assert(isa<MemoryPhi>(MA));
-      Worklist.append(upward_defs_begin({MA, MAP.second}), upward_defs_end());
+      Worklist.append(
+          upward_defs_begin({const_cast<MemoryAccess *>(MA), MAP.second}),
+          upward_defs_end());
     }
   }
 
@@ -1668,6 +1674,34 @@ void MemorySSA::verifyMemorySSA() const
   verifyOrdering(F);
   verifyDominationNumbers(F);
   Walker->verify(this);
+  verifyClobberSanity(F);
+}
+
+/// Check sanity of the clobbering instruction for access MA.
+void MemorySSA::checkClobberSanityAccess(const MemoryAccess *MA) const {
+  if (const auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) {
+    if (!MUD->isOptimized())
+      return;
+    auto *I = MUD->getMemoryInst();
+    auto Loc = MemoryLocation::getOrNone(I);
+    if (Loc == None)
+      return;
+    auto *Clobber = MUD->getOptimized();
+    UpwardsMemoryQuery Q(I, MUD);
+    checkClobberSanity(MUD, Clobber, *Loc, *this, Q, *AA);
+  }
+}
+
+void MemorySSA::verifyClobberSanity(const Function &F) const {
+#if !defined(NDEBUG) && defined(EXPENSIVE_CHECKS)
+  for (const BasicBlock &BB : F) {
+    const AccessList *Accesses = getBlockAccesses(&BB);
+    if (!Accesses)
+      continue;
+    for (const MemoryAccess &MA : *Accesses)
+      checkClobberSanityAccess(&MA);
+  }
+#endif
 }
 
 /// Verify that all of the blocks we believe to have valid domination numbers
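
Since checkClobberSanityAccess() is declared public in the header change above, a transform that keeps MemorySSA up to date could, in principle, spot-check a single access after an update instead of re-verifying the whole function. A hedged sketch follows; the helper name and the assumption that I's access has already been re-optimized are illustrative and not part of this patch.

  #include "llvm/Analysis/MemorySSA.h"
  #include "llvm/IR/Instruction.h"

  using namespace llvm;

  // Illustrative helper, not part of this commit. Assumes MSSA is an
  // up-to-date MemorySSA for the function containing I.
  static void spotCheckAccess(MemorySSA &MSSA, Instruction *I) {
  #if !defined(NDEBUG) && defined(EXPENSIVE_CHECKS)
    if (MemoryUseOrDef *MUD = MSSA.getMemoryAccess(I))
      // A no-op unless MUD is optimized and I has a memory location;
      // otherwise re-runs checkClobberSanity() on the cached clobber.
      MSSA.checkClobberSanityAccess(MUD);
  #else
    (void)MSSA;
    (void)I;
  #endif
  }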



