[polly] r249266 - BlockGenerator: Use plain Value * instead of const Value *

Tobias Grosser via llvm-commits llvm-commits at lists.llvm.org
Sun Oct 4 03:18:45 PDT 2015


Author: grosser
Date: Sun Oct  4 05:18:45 2015
New Revision: 249266

URL: http://llvm.org/viewvc/llvm-project?rev=249266&view=rev
Log:
BlockGenerator: Use plain Value * instead of const Value *

The use of const-qualified Value pointers prevents the use of AssertingVH. We
could probably add const support to AssertingVH, but as const correctness
currently seems to provide limited benefit in Polly, we do not do this yet.
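
For illustration only (not part of the commit): a minimal sketch of why the map
keys end up as plain Value * once AssertingVH is involved. The standalone
program below is an assumption for illustration, not Polly code;
AssertingVH<Value> wraps a non-const Value *, so a DenseMap keyed on
const Value * cannot simply be switched over to AssertingVH keys without first
dropping the const qualifier.

    // Sketch only: shows AssertingVH<Value> as a DenseMap key, which is what
    // the new ScalarAllocaMapTy/ValueMapT types rely on. Not Polly code.
    #include "llvm/ADT/DenseMap.h"
    #include "llvm/IR/Constants.h"
    #include "llvm/IR/LLVMContext.h"
    #include "llvm/IR/Type.h"
    #include "llvm/IR/Value.h"
    #include "llvm/IR/ValueHandle.h"

    using namespace llvm;

    int main() {
      LLVMContext Ctx;
      Value *V = ConstantInt::get(Type::getInt32Ty(Ctx), 42);

      // Works: AssertingVH<Value> stores a non-const Value * and (in asserts
      // builds) aborts if the underlying Value is deleted while the handle is
      // still alive. DenseMapInfo<AssertingVH<T>> makes it usable as a key.
      DenseMap<AssertingVH<Value>, Value *> Handles;
      Handles[V] = V;

      // The old ValueMapT used const Value * keys; populating an
      // AssertingVH-based map from it would require either const support in
      // the value-handle machinery or a const_cast at every insertion.
      DenseMap<const Value *, Value *> OldStyle;
      OldStyle[V] = V;

      return 0;
    }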

Modified:
    polly/trunk/include/polly/CodeGen/BlockGenerators.h
    polly/trunk/lib/CodeGen/BlockGenerators.cpp

Modified: polly/trunk/include/polly/CodeGen/BlockGenerators.h
URL: http://llvm.org/viewvc/llvm-project/polly/trunk/include/polly/CodeGen/BlockGenerators.h?rev=249266&r1=249265&r2=249266&view=diff
==============================================================================
--- polly/trunk/include/polly/CodeGen/BlockGenerators.h (original)
+++ polly/trunk/include/polly/CodeGen/BlockGenerators.h Sun Oct  4 05:18:45 2015
@@ -67,7 +67,7 @@ public:
   /// @see The ScalarMap and PHIOpMap member.
   using ScalarAllocaMapTy = DenseMap<AssertingVH<Value>, AssertingVH<Value>>;
 
-  typedef llvm::DenseMap<const llvm::Value *, llvm::Value *> ValueMapT;
+  typedef llvm::DenseMap<llvm::Value *, llvm::Value *> ValueMapT;
   typedef llvm::SmallVector<ValueMapT, 8> VectorValueMapT;
 
   /// @brief Simple vector of instructions to store escape users.
@@ -429,9 +429,8 @@ protected:
   ///
   /// @returns  o A newly synthesized value.
   ///           o NULL, if synthesizing the value failed.
-  Value *trySynthesizeNewValue(ScopStmt &Stmt, const Value *Old,
-                               ValueMapT &BBMap, LoopToScevMapT &LTS,
-                               Loop *L) const;
+  Value *trySynthesizeNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap,
+                               LoopToScevMapT &LTS, Loop *L) const;
 
   /// @brief Get the new version of a value.
   ///
@@ -457,10 +456,10 @@ protected:
   /// @returns  o The old value, if it is still valid.
   ///           o The new value, if available.
   ///           o NULL, if no value is found.
-  Value *getNewValue(ScopStmt &Stmt, const Value *Old, ValueMapT &BBMap,
+  Value *getNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap,
                      LoopToScevMapT &LTS, Loop *L) const;
 
-  void copyInstScalar(ScopStmt &Stmt, const Instruction *Inst, ValueMapT &BBMap,
+  void copyInstScalar(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap,
                       LoopToScevMapT &LTS);
 
   /// @brief Get the innermost loop that surrounds an instruction.
@@ -474,22 +473,22 @@ protected:
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
   Value *generateLocationAccessed(ScopStmt &Stmt, const Instruction *Inst,
-                                  const Value *Pointer, ValueMapT &BBMap,
+                                  Value *Pointer, ValueMapT &BBMap,
                                   LoopToScevMapT &LTS,
                                   isl_id_to_ast_expr *NewAccesses);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  Value *generateScalarLoad(ScopStmt &Stmt, const LoadInst *load,
-                            ValueMapT &BBMap, LoopToScevMapT &LTS,
+  Value *generateScalarLoad(ScopStmt &Stmt, LoadInst *load, ValueMapT &BBMap,
+                            LoopToScevMapT &LTS,
                             isl_id_to_ast_expr *NewAccesses);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void generateScalarStore(ScopStmt &Stmt, const StoreInst *store,
-                           ValueMapT &BBMap, LoopToScevMapT &LTS,
+  void generateScalarStore(ScopStmt &Stmt, StoreInst *store, ValueMapT &BBMap,
+                           LoopToScevMapT &LTS,
                            isl_id_to_ast_expr *NewAccesses);
 
   /// @brief Copy a single PHI instruction.
@@ -498,7 +497,7 @@ protected:
   /// subclasses to handle PHIs different.
   ///
   /// @returns The nullptr as the BlockGenerator does not copy PHIs.
-  virtual Value *copyPHIInstruction(ScopStmt &, const PHINode *, ValueMapT &,
+  virtual Value *copyPHIInstruction(ScopStmt &, PHINode *, ValueMapT &,
                                     LoopToScevMapT &) {
     return nullptr;
   }
@@ -522,9 +521,8 @@ protected:
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void copyInstruction(ScopStmt &Stmt, const Instruction *Inst,
-                       ValueMapT &BBMap, LoopToScevMapT &LTS,
-                       isl_id_to_ast_expr *NewAccesses);
+  void copyInstruction(ScopStmt &Stmt, Instruction *Inst, ValueMapT &BBMap,
+                       LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses);
 
   /// @brief Helper to get the newest version of @p ScalarValue.
   ///
@@ -605,7 +603,7 @@ private:
 
   int getVectorWidth();
 
-  Value *getVectorValue(ScopStmt &Stmt, const Value *Old, ValueMapT &VectorMap,
+  Value *getVectorValue(ScopStmt &Stmt, Value *Old, ValueMapT &VectorMap,
                         VectorValueMapT &ScalarMaps, Loop *L);
 
   Type *getVectorPtrTy(const Value *V, int Width);
@@ -628,7 +626,7 @@ private:
   /// @param NewAccesses    A map from memory access ids to new ast
   ///                       expressions, which may contain new access
   ///                       expressions for certain memory accesses.
-  Value *generateStrideOneLoad(ScopStmt &Stmt, const LoadInst *Load,
+  Value *generateStrideOneLoad(ScopStmt &Stmt, LoadInst *Load,
                                VectorValueMapT &ScalarMaps,
                                __isl_keep isl_id_to_ast_expr *NewAccesses,
                                bool NegativeStride);
@@ -646,7 +644,7 @@ private:
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  Value *generateStrideZeroLoad(ScopStmt &Stmt, const LoadInst *Load,
+  Value *generateStrideZeroLoad(ScopStmt &Stmt, LoadInst *Load,
                                 ValueMapT &BBMap,
                                 __isl_keep isl_id_to_ast_expr *NewAccesses);
 
@@ -664,34 +662,34 @@ private:
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  Value *generateUnknownStrideLoad(ScopStmt &Stmt, const LoadInst *Load,
+  Value *generateUnknownStrideLoad(ScopStmt &Stmt, LoadInst *Load,
                                    VectorValueMapT &ScalarMaps,
                                    __isl_keep isl_id_to_ast_expr *NewAccesses);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void generateLoad(ScopStmt &Stmt, const LoadInst *Load, ValueMapT &VectorMap,
+  void generateLoad(ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap,
                     VectorValueMapT &ScalarMaps,
                     __isl_keep isl_id_to_ast_expr *NewAccesses);
 
-  void copyUnaryInst(ScopStmt &Stmt, const UnaryInstruction *Inst,
+  void copyUnaryInst(ScopStmt &Stmt, UnaryInstruction *Inst,
                      ValueMapT &VectorMap, VectorValueMapT &ScalarMaps);
 
-  void copyBinaryInst(ScopStmt &Stmt, const BinaryOperator *Inst,
+  void copyBinaryInst(ScopStmt &Stmt, BinaryOperator *Inst,
                       ValueMapT &VectorMap, VectorValueMapT &ScalarMaps);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void copyStore(ScopStmt &Stmt, const StoreInst *Store, ValueMapT &VectorMap,
+  void copyStore(ScopStmt &Stmt, StoreInst *Store, ValueMapT &VectorMap,
                  VectorValueMapT &ScalarMaps,
                  __isl_keep isl_id_to_ast_expr *NewAccesses);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void copyInstScalarized(ScopStmt &Stmt, const Instruction *Inst,
+  void copyInstScalarized(ScopStmt &Stmt, Instruction *Inst,
                           ValueMapT &VectorMap, VectorValueMapT &ScalarMaps,
                           __isl_keep isl_id_to_ast_expr *NewAccesses);
 
@@ -703,8 +701,8 @@ private:
   /// @param NewAccesses A map from memory access ids to new ast expressions,
   ///                    which may contain new access expressions for certain
   ///                    memory accesses.
-  void copyInstruction(ScopStmt &Stmt, const Instruction *Inst,
-                       ValueMapT &VectorMap, VectorValueMapT &ScalarMaps,
+  void copyInstruction(ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap,
+                       VectorValueMapT &ScalarMaps,
                        __isl_keep isl_id_to_ast_expr *NewAccesses);
 
   /// @param NewAccesses A map from memory access ids to new ast expressions,
@@ -797,7 +795,7 @@ private:
   /// @param LTS       A map from old loops to new induction variables as SCEVs.
   ///
   /// @returns The copied instruction or nullptr if no copy was made.
-  virtual Value *copyPHIInstruction(ScopStmt &Stmt, const PHINode *Inst,
+  virtual Value *copyPHIInstruction(ScopStmt &Stmt, PHINode *Inst,
                                     ValueMapT &BBMap,
                                     LoopToScevMapT &LTS) override;
 };

Modified: polly/trunk/lib/CodeGen/BlockGenerators.cpp
URL: http://llvm.org/viewvc/llvm-project/polly/trunk/lib/CodeGen/BlockGenerators.cpp?rev=249266&r1=249265&r2=249266&view=diff
==============================================================================
--- polly/trunk/lib/CodeGen/BlockGenerators.cpp (original)
+++ polly/trunk/lib/CodeGen/BlockGenerators.cpp Sun Oct  4 05:18:45 2015
@@ -99,7 +99,7 @@ BlockGenerator::BlockGenerator(PollyIRBu
       EntryBB(nullptr), PHIOpMap(PHIOpMap), ScalarMap(ScalarMap),
       EscapeMap(EscapeMap), GlobalMap(GlobalMap) {}
 
-Value *BlockGenerator::trySynthesizeNewValue(ScopStmt &Stmt, const Value *Old,
+Value *BlockGenerator::trySynthesizeNewValue(ScopStmt &Stmt, Value *Old,
                                              ValueMapT &BBMap,
                                              LoopToScevMapT &LTS,
                                              Loop *L) const {
@@ -129,9 +129,8 @@ Value *BlockGenerator::trySynthesizeNewV
   return nullptr;
 }
 
-Value *BlockGenerator::getNewValue(ScopStmt &Stmt, const Value *Old,
-                                   ValueMapT &BBMap, LoopToScevMapT &LTS,
-                                   Loop *L) const {
+Value *BlockGenerator::getNewValue(ScopStmt &Stmt, Value *Old, ValueMapT &BBMap,
+                                   LoopToScevMapT &LTS, Loop *L) const {
   // We assume constants never change.
   // This avoids map lookups for many calls to this function.
   if (isa<Constant>(Old))
@@ -167,7 +166,7 @@ Value *BlockGenerator::getNewValue(ScopS
   return nullptr;
 }
 
-void BlockGenerator::copyInstScalar(ScopStmt &Stmt, const Instruction *Inst,
+void BlockGenerator::copyInstScalar(ScopStmt &Stmt, Instruction *Inst,
                                     ValueMapT &BBMap, LoopToScevMapT &LTS) {
   // We do not generate debug intrinsics as we did not investigate how to
   // copy them correctly. At the current state, they just crash the code
@@ -200,8 +199,8 @@ void BlockGenerator::copyInstScalar(Scop
 }
 
 Value *BlockGenerator::generateLocationAccessed(
-    ScopStmt &Stmt, const Instruction *Inst, const Value *Pointer,
-    ValueMapT &BBMap, LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) {
+    ScopStmt &Stmt, const Instruction *Inst, Value *Pointer, ValueMapT &BBMap,
+    LoopToScevMapT &LTS, isl_id_to_ast_expr *NewAccesses) {
   const MemoryAccess &MA = Stmt.getAccessFor(Inst);
 
   isl_ast_expr *AccessExpr = isl_id_to_ast_expr_get(NewAccesses, MA.getId());
@@ -234,7 +233,7 @@ Loop *BlockGenerator::getLoopForInst(con
   return LI.getLoopFor(Inst->getParent());
 }
 
-Value *BlockGenerator::generateScalarLoad(ScopStmt &Stmt, const LoadInst *Load,
+Value *BlockGenerator::generateScalarLoad(ScopStmt &Stmt, LoadInst *Load,
                                           ValueMapT &BBMap, LoopToScevMapT &LTS,
                                           isl_id_to_ast_expr *NewAccesses) {
   if (Value *PreloadLoad = GlobalMap.lookup(Load))
@@ -253,7 +252,7 @@ Value *BlockGenerator::generateScalarLoa
   return ScalarLoad;
 }
 
-void BlockGenerator::generateScalarStore(ScopStmt &Stmt, const StoreInst *Store,
+void BlockGenerator::generateScalarStore(ScopStmt &Stmt, StoreInst *Store,
                                          ValueMapT &BBMap, LoopToScevMapT &LTS,
                                          isl_id_to_ast_expr *NewAccesses) {
   auto *Pointer = Store->getPointerOperand();
@@ -269,7 +268,7 @@ void BlockGenerator::generateScalarStore
   Builder.CreateAlignedStore(ValueOperand, NewPointer, Store->getAlignment());
 }
 
-void BlockGenerator::copyInstruction(ScopStmt &Stmt, const Instruction *Inst,
+void BlockGenerator::copyInstruction(ScopStmt &Stmt, Instruction *Inst,
                                      ValueMapT &BBMap, LoopToScevMapT &LTS,
                                      isl_id_to_ast_expr *NewAccesses) {
 
@@ -642,7 +641,7 @@ VectorBlockGenerator::VectorBlockGenerat
   assert(Schedule && "No statement domain provided");
 }
 
-Value *VectorBlockGenerator::getVectorValue(ScopStmt &Stmt, const Value *Old,
+Value *VectorBlockGenerator::getVectorValue(ScopStmt &Stmt, Value *Old,
                                             ValueMapT &VectorMap,
                                             VectorValueMapT &ScalarMaps,
                                             Loop *L) {
@@ -674,7 +673,7 @@ Type *VectorBlockGenerator::getVectorPtr
 }
 
 Value *VectorBlockGenerator::generateStrideOneLoad(
-    ScopStmt &Stmt, const LoadInst *Load, VectorValueMapT &ScalarMaps,
+    ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps,
     __isl_keep isl_id_to_ast_expr *NewAccesses, bool NegativeStride = false) {
   unsigned VectorWidth = getVectorWidth();
   auto *Pointer = Load->getPointerOperand();
@@ -705,7 +704,7 @@ Value *VectorBlockGenerator::generateStr
 }
 
 Value *VectorBlockGenerator::generateStrideZeroLoad(
-    ScopStmt &Stmt, const LoadInst *Load, ValueMapT &BBMap,
+    ScopStmt &Stmt, LoadInst *Load, ValueMapT &BBMap,
     __isl_keep isl_id_to_ast_expr *NewAccesses) {
   auto *Pointer = Load->getPointerOperand();
   Type *VectorPtrType = getVectorPtrTy(Pointer, 1);
@@ -728,7 +727,7 @@ Value *VectorBlockGenerator::generateStr
 }
 
 Value *VectorBlockGenerator::generateUnknownStrideLoad(
-    ScopStmt &Stmt, const LoadInst *Load, VectorValueMapT &ScalarMaps,
+    ScopStmt &Stmt, LoadInst *Load, VectorValueMapT &ScalarMaps,
     __isl_keep isl_id_to_ast_expr *NewAccesses
 
     ) {
@@ -752,7 +751,7 @@ Value *VectorBlockGenerator::generateUnk
 }
 
 void VectorBlockGenerator::generateLoad(
-    ScopStmt &Stmt, const LoadInst *Load, ValueMapT &VectorMap,
+    ScopStmt &Stmt, LoadInst *Load, ValueMapT &VectorMap,
     VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) {
   if (Value *PreloadLoad = GlobalMap.lookup(Load)) {
     VectorMap[Load] = Builder.CreateVectorSplat(getVectorWidth(), PreloadLoad,
@@ -786,8 +785,7 @@ void VectorBlockGenerator::generateLoad(
   VectorMap[Load] = NewLoad;
 }
 
-void VectorBlockGenerator::copyUnaryInst(ScopStmt &Stmt,
-                                         const UnaryInstruction *Inst,
+void VectorBlockGenerator::copyUnaryInst(ScopStmt &Stmt, UnaryInstruction *Inst,
                                          ValueMapT &VectorMap,
                                          VectorValueMapT &ScalarMaps) {
   int VectorWidth = getVectorWidth();
@@ -801,8 +799,7 @@ void VectorBlockGenerator::copyUnaryInst
   VectorMap[Inst] = Builder.CreateCast(Cast->getOpcode(), NewOperand, DestType);
 }
 
-void VectorBlockGenerator::copyBinaryInst(ScopStmt &Stmt,
-                                          const BinaryOperator *Inst,
+void VectorBlockGenerator::copyBinaryInst(ScopStmt &Stmt, BinaryOperator *Inst,
                                           ValueMapT &VectorMap,
                                           VectorValueMapT &ScalarMaps) {
   Loop *L = getLoopForInst(Inst);
@@ -819,7 +816,7 @@ void VectorBlockGenerator::copyBinaryIns
 }
 
 void VectorBlockGenerator::copyStore(
-    ScopStmt &Stmt, const StoreInst *Store, ValueMapT &VectorMap,
+    ScopStmt &Stmt, StoreInst *Store, ValueMapT &VectorMap,
     VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) {
   const MemoryAccess &Access = Stmt.getAccessFor(Store);
 
@@ -893,7 +890,7 @@ bool VectorBlockGenerator::extractScalar
 }
 
 void VectorBlockGenerator::copyInstScalarized(
-    ScopStmt &Stmt, const Instruction *Inst, ValueMapT &VectorMap,
+    ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap,
     VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) {
   bool HasVectorOperand;
   int VectorWidth = getVectorWidth();
@@ -921,7 +918,7 @@ void VectorBlockGenerator::copyInstScala
 int VectorBlockGenerator::getVectorWidth() { return VLTS.size(); }
 
 void VectorBlockGenerator::copyInstruction(
-    ScopStmt &Stmt, const Instruction *Inst, ValueMapT &VectorMap,
+    ScopStmt &Stmt, Instruction *Inst, ValueMapT &VectorMap,
     VectorValueMapT &ScalarMaps, __isl_keep isl_id_to_ast_expr *NewAccesses) {
   // Terminator instructions control the control flow. They are explicitly
   // expressed in the clast and do not need to be copied.
@@ -1215,7 +1212,7 @@ void RegionGenerator::addOperandToPHI(Sc
   PHICopy->addIncoming(OpCopy, BBCopy);
 }
 
-Value *RegionGenerator::copyPHIInstruction(ScopStmt &Stmt, const PHINode *PHI,
+Value *RegionGenerator::copyPHIInstruction(ScopStmt &Stmt, PHINode *PHI,
                                            ValueMapT &BBMap,
                                            LoopToScevMapT &LTS) {
   unsigned NumIncoming = PHI->getNumIncomingValues();



