[llvm-commits] [llvm] r52303 - /llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp

Chris Lattner sabre at nondot.org
Sun Jun 15 23:30:22 PDT 2008


Author: lattner
Date: Mon Jun 16 01:30:22 2008
New Revision: 52303

URL: http://llvm.org/viewvc/llvm-project?rev=52303&view=rev
Log:
move a bunch of predicates up into their own section
in this file, no other changes.

Modified:
    llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp

Modified: llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp?rev=52303&r1=52302&r2=52303&view=diff

==============================================================================
--- llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp (original)
+++ llvm/trunk/lib/Analysis/BasicAliasAnalysis.cpp Mon Jun 16 01:30:22 2008
@@ -32,6 +32,173 @@
 #include <algorithm>
 using namespace llvm;
 
+//===----------------------------------------------------------------------===//
+// Useful predicates
+//===----------------------------------------------------------------------===//
+
+// Determine if an AllocationInst instruction escapes from the function it is
+// contained in. If it does not escape, there is no way for another function to
+// mod/ref it.  We do this by looking at its uses and determining if the uses
+// can escape (recursively).
+static bool AddressMightEscape(const Value *V) {
+  for (Value::use_const_iterator UI = V->use_begin(), E = V->use_end();
+       UI != E; ++UI) {
+    const Instruction *I = cast<Instruction>(*UI);
+    switch (I->getOpcode()) {
+    case Instruction::Load: 
+      break; //next use.
+    case Instruction::Store:
+      if (I->getOperand(0) == V)
+        return true; // Escapes if the pointer is stored.
+      break; // next use.
+    case Instruction::GetElementPtr:
+      if (AddressMightEscape(I))
+        return true;
+      break; // next use.
+    case Instruction::BitCast:
+      if (AddressMightEscape(I))
+        return true;
+      break; // next use
+    case Instruction::Ret:
+      // If returned, the address will escape to calling functions, but no
+      // callees could modify it.
+      break; // next use
+    case Instruction::Call:
+      // If the call is to a few known safe intrinsics, we know that it does
+      // not escape
+      if (!isa<MemIntrinsic>(I))
+        return true;
+      break;  // next use
+    default:
+      return true;
+    }
+  }
+  return false;
+}
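
Illustration (hypothetical IR, not part of this change) of what the switch
above does and does not count as an escape:

  //   %a = alloca i32
  //   store i32 7, i32* %a          ; %a is only the address operand: no escape
  //   %c = bitcast i32* %a to i8*   ; recurse into the bitcast's users
  //   call void @unknown(i8* %c)    ; not a MemIntrinsic: escapes
  // AddressMightEscape(%a) is true here solely because of the unknown call;
  // replace it with llvm.memset/memcpy/memmove and the result becomes false.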
+
+/// getUnderlyingObject - This traverses the use chain to figure out what object
+/// the specified value points to.  If the value points to, or is derived from,
+/// a unique object or an argument, return it.  This returns:
+///    Arguments, GlobalVariables, Functions, Allocas, Mallocs.
+static const Value *getUnderlyingObject(const Value *V) {
+  if (!isa<PointerType>(V->getType())) return V;
+
+  // If we are at some type of object, return it. GlobalValues and Allocations
+  // have unique addresses. 
+  if (isa<GlobalValue>(V) || isa<AllocationInst>(V) || isa<Argument>(V))
+    return V;
+
+  // Traverse through different addressing mechanisms...
+  if (const Instruction *I = dyn_cast<Instruction>(V)) {
+    if (isa<BitCastInst>(I) || isa<GetElementPtrInst>(I))
+      return getUnderlyingObject(I->getOperand(0));
+  } else if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
+    if (CE->getOpcode() == Instruction::BitCast || 
+        CE->getOpcode() == Instruction::GetElementPtr)
+      return getUnderlyingObject(CE->getOperand(0));
+  }
+  return V;
+}
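
Illustration (hypothetical IR, not part of this change) of how the recursion
above strips addressing back to the allocation:

  //   %a = alloca { i32, i32 }
  //   %c = bitcast { i32, i32 }* %a to i8*
  //   %p = getelementptr i8* %c, i32 4
  // getUnderlyingObject(%p) walks %p -> %c -> %a and returns the alloca.  For a
  // pointer produced by a load it matches no case and returns the load itself,
  // so callers cannot assume the result is always an identified object.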
+
+static const User *isGEP(const Value *V) {
+  if (isa<GetElementPtrInst>(V) ||
+      (isa<ConstantExpr>(V) &&
+       cast<ConstantExpr>(V)->getOpcode() == Instruction::GetElementPtr))
+    return cast<User>(V);
+  return 0;
+}
+
+static const Value *GetGEPOperands(const Value *V, 
+                                   SmallVector<Value*, 16> &GEPOps){
+  assert(GEPOps.empty() && "Expect empty list to populate!");
+  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
+                cast<User>(V)->op_end());
+
+  // Accumulate all of the chained indexes into the operand array
+  V = cast<User>(V)->getOperand(0);
+
+  while (const User *G = isGEP(V)) {
+    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
+        !cast<Constant>(GEPOps[0])->isNullValue())
+      break;  // Don't handle folding arbitrary pointer offsets yet...
+    GEPOps.erase(GEPOps.begin());   // Drop the zero index
+    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
+    V = G->getOperand(0);
+  }
+  return V;
+}
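
Sketch of the index folding performed by the loop above, on hypothetical IR and
assuming P2 is a const Value* holding %p2 (not part of this change):

  //   %p1 = getelementptr [4 x [8 x i32]]* %base, i32 0, i32 2
  //   %p2 = getelementptr [8 x i32]* %p1, i32 0, i32 1
  SmallVector<Value*, 16> Ops;
  const Value *Base = isGEP(P2) ? GetGEPOperands(P2, Ops) : P2;
  // Base == %base and Ops == {0, 2, 1}: the outer GEP's leading zero index is
  // dropped and the inner GEP's indices are folded in front of the rest.  If
  // the outer GEP's first index were non-zero, the loop would stop and Base
  // would be %p1 instead.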
+
+/// isIdentifiedObject - Return true if this pointer refers to a distinct and
+/// identifiable object.  This returns true for:
+///    Global Variables and Functions
+///    Allocas and Mallocs
+///    ByVal and NoAlias Arguments
+///
+static bool isIdentifiedObject(const Value *V) {
+  if (isa<GlobalValue>(V) || isa<AllocationInst>(V))
+    return true;
+  if (const Argument *A = dyn_cast<Argument>(V))
+    return A->hasNoAliasAttr() || A->hasByValAttr();
+  return false;
+}
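
Concrete (hypothetical) inputs and the results the rules above give:

  //   @G = global i32 0                    -> true   (GlobalValue)
  //   %a = alloca i32 / a call to malloc   -> true   (AllocationInst)
  //   i32* noalias %p      (argument)      -> true
  //   %struct.S* byval %s  (argument)      -> true
  //   i32* %q        (plain argument)      -> false
  //   %u = load i32** @slot                -> false  (could point at anything)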
+
+/// isKnownNonNull - Return true if we know that the specified value is never
+/// null.
+static bool isKnownNonNull(const Value *V) {
+  // Alloca never returns null, malloc might.
+  if (isa<AllocaInst>(V)) return true;
+  
+  // A byval argument is never null.
+  if (const Argument *A = dyn_cast<Argument>(V))
+    return A->hasByValAttr();
+
+  // Global values are not null unless extern weak.
+  if (const GlobalValue *GV = dyn_cast<GlobalValue>(V))
+    return !GV->hasExternalWeakLinkage();
+  return false;
+}
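
Concrete (hypothetical) inputs for the cases handled above:

  //   %a = alloca i32                      -> true   (alloca never yields null)
  //   a call to malloc                     -> false  (may return null)
  //   %struct.S* byval %s  (argument)      -> true
  //   i32* %p        (plain argument)      -> false
  //   @G = global i32 0                    -> true
  //   @W = extern_weak global i32          -> false  (may resolve to null)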
+
+/// isNonEscapingLocalObject - Return true if the pointer is to a function-local
+/// object that never escapes from the function.
+static bool isNonEscapingLocalObject(const Value *V) {
+  // If this is a local allocation, check to see if it escapes.
+  if (isa<AllocationInst>(V))
+    return !AddressMightEscape(V);
+      
+  // If this is an argument that corresponds to a byval or noalias argument,
+  // it can't escape either.
+  if (const Argument *A = dyn_cast<Argument>(V))
+    if (A->hasByValAttr() || A->hasNoAliasAttr())
+      return !AddressMightEscape(V);
+  return false;
+}
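
Illustration (hypothetical IR, not part of this change):

  //   %buf = alloca [16 x i8]
  //   ... %buf is only loaded from, stored *to*, GEP'd, and passed to mem
  //   intrinsics ...
  // isNonEscapingLocalObject(%buf) is true: the address is never written out
  // to memory and never handed to a non-intrinsic call, so at most it can be
  // returned to the caller.  That is the kind of fact the alias and mod/ref
  // queries in this file can use to disambiguate %buf from pointers that
  // originate outside the function.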
+
+
+/// isObjectSmallerThan - Return true if we can prove that the object specified
+/// by V is smaller than Size.
+static bool isObjectSmallerThan(const Value *V, unsigned Size,
+                                const TargetData &TD) {
+  const Type *AccessTy = 0;
+  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
+    AccessTy = GV->getType()->getElementType();
+  
+  if (const AllocationInst *AI = dyn_cast<AllocationInst>(V))
+    if (!AI->isArrayAllocation())
+      AccessTy = AI->getType()->getElementType();
+
+  if (const Argument *A = dyn_cast<Argument>(V))
+    if (A->hasByValAttr())
+      AccessTy = cast<PointerType>(A->getType())->getElementType();
+  
+  if (AccessTy && AccessTy->isSized())
+    return TD.getABITypeSize(AccessTy) < Size;
+  return false;
+}
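
Illustration with hypothetical values (not part of this change):

  //   @G = global i32 0, with a TargetData where i32 has ABI size 4:
  //     isObjectSmallerThan(@G, 8, TD) == true, so an 8-byte access cannot lie
  //     entirely within @G.
  //   %a = alloca i32, i32 %n   (an array allocation):
  //     AccessTy is never set, so the function conservatively returns false;
  //     the total size depends on %n and is unknown here.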
+
+//===----------------------------------------------------------------------===//
+// NoAA Pass
+//===----------------------------------------------------------------------===//
+
 namespace {
   /// NoAA - This class implements the -no-aa pass, which always returns "I
   /// don't know" for alias queries.  NoAA is unlike other alias analysis
@@ -91,6 +258,10 @@
 
 ImmutablePass *llvm::createNoAAPass() { return new NoAA(); }
 
+//===----------------------------------------------------------------------===//
+// BasicAA Pass
+//===----------------------------------------------------------------------===//
+
 namespace {
   /// BasicAliasAnalysis - This is the default alias analysis implementation.
   /// Because it doesn't chain to a previous alias analysis (like -no-aa), it
@@ -138,57 +309,6 @@
   return new BasicAliasAnalysis();
 }
 
-/// getUnderlyingObject - This traverses the use chain to figure out what object
-/// the specified value points to.  If the value points to, or is derived from,
-/// a unique object or an argument, return it.  This returns:
-///    Arguments, GlobalVariables, Functions, Allocas, Mallocs.
-static const Value *getUnderlyingObject(const Value *V) {
-  if (!isa<PointerType>(V->getType())) return V;
-
-  // If we are at some type of object, return it. GlobalValues and Allocations
-  // have unique addresses. 
-  if (isa<GlobalValue>(V) || isa<AllocationInst>(V) || isa<Argument>(V))
-    return V;
-
-  // Traverse through different addressing mechanisms...
-  if (const Instruction *I = dyn_cast<Instruction>(V)) {
-    if (isa<BitCastInst>(I) || isa<GetElementPtrInst>(I))
-      return getUnderlyingObject(I->getOperand(0));
-  } else if (const ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
-    if (CE->getOpcode() == Instruction::BitCast || 
-        CE->getOpcode() == Instruction::GetElementPtr)
-      return getUnderlyingObject(CE->getOperand(0));
-  }
-  return V;
-}
-
-static const User *isGEP(const Value *V) {
-  if (isa<GetElementPtrInst>(V) ||
-      (isa<ConstantExpr>(V) &&
-       cast<ConstantExpr>(V)->getOpcode() == Instruction::GetElementPtr))
-    return cast<User>(V);
-  return 0;
-}
-
-static const Value *GetGEPOperands(const Value *V, 
-                                   SmallVector<Value*, 16> &GEPOps){
-  assert(GEPOps.empty() && "Expect empty list to populate!");
-  GEPOps.insert(GEPOps.end(), cast<User>(V)->op_begin()+1,
-                cast<User>(V)->op_end());
-
-  // Accumulate all of the chained indexes into the operand array
-  V = cast<User>(V)->getOperand(0);
-
-  while (const User *G = isGEP(V)) {
-    if (!isa<Constant>(GEPOps[0]) || isa<GlobalValue>(GEPOps[0]) ||
-        !cast<Constant>(GEPOps[0])->isNullValue())
-      break;  // Don't handle folding arbitrary pointer offsets yet...
-    GEPOps.erase(GEPOps.begin());   // Drop the zero index
-    GEPOps.insert(GEPOps.begin(), G->op_begin()+1, G->op_end());
-    V = G->getOperand(0);
-  }
-  return V;
-}
 
 /// pointsToConstantMemory - Chase pointers until we find a (constant
 /// global) or not.
@@ -199,46 +319,6 @@
   return false;
 }
 
-// Determine if an AllocationInst instruction escapes from the function it is
-// contained in. If it does not escape, there is no way for another function to
-// mod/ref it.  We do this by looking at its uses and determining if the uses
-// can escape (recursively).
-static bool AddressMightEscape(const Value *V) {
-  for (Value::use_const_iterator UI = V->use_begin(), E = V->use_end();
-       UI != E; ++UI) {
-    const Instruction *I = cast<Instruction>(*UI);
-    switch (I->getOpcode()) {
-    case Instruction::Load: 
-      break; //next use.
-    case Instruction::Store:
-      if (I->getOperand(0) == V)
-        return true; // Escapes if the pointer is stored.
-      break; // next use.
-    case Instruction::GetElementPtr:
-      if (AddressMightEscape(I))
-        return true;
-      break; // next use.
-    case Instruction::BitCast:
-      if (AddressMightEscape(I))
-        return true;
-      break; // next use
-    case Instruction::Ret:
-      // If returned, the address will escape to calling functions, but no
-      // callees could modify it.
-      break; // next use
-    case Instruction::Call:
-      // If the call is to a few known safe intrinsics, we know that it does
-      // not escape
-      if (!isa<MemIntrinsic>(I))
-        return true;
-      break;  // next use
-    default:
-      return true;
-    }
-  }
-  return false;
-}
-
 // getModRefInfo - Check to see if the specified callsite can clobber the
 // specified memory object.  Since we only look at local properties of this
 // function, we really can't say much about this query.  We do, however, use
@@ -287,72 +367,6 @@
   return AliasAnalysis::getModRefInfo(CS, P, Size);
 }
 
-/// isIdentifiedObject - Return true if this pointer refers to a distinct and
-/// identifiable object.  This returns true for:
-///    Global Variables and Functions
-///    Allocas and Mallocs
-///    ByVal and NoAlias Arguments
-///
-static bool isIdentifiedObject(const Value *V) {
-  if (isa<GlobalValue>(V) || isa<AllocationInst>(V))
-    return true;
-  if (const Argument *A = dyn_cast<Argument>(V))
-    return A->hasNoAliasAttr() || A->hasByValAttr();
-  return false;
-}
-
-/// isKnownNonNull - Return true if we know that the specified value is never
-/// null.
-static bool isKnownNonNull(const Value *V) {
-  // Alloca never returns null, malloc might.
-  if (isa<AllocaInst>(V)) return true;
-  
-  // A byval argument is never null.
-  if (const Argument *A = dyn_cast<Argument>(V))
-    return A->hasByValAttr();
-
-  // Global values are not null unless extern weak.
-  if (const GlobalValue *GV = dyn_cast<GlobalValue>(V))
-    return !GV->hasExternalWeakLinkage();
-  return false;
-}
-
-/// isNonEscapingLocalObject - Return true if the pointer is to a function-local
-/// object that never escapes from the function.
-static bool isNonEscapingLocalObject(const Value *V) {
-  // If this is a local allocation, check to see if it escapes.
-  if (isa<AllocationInst>(V))
-    return !AddressMightEscape(V);
-      
-  // If this is an argument that corresponds to a byval or noalias argument,
-  // it can't escape either.
-  if (const Argument *A = dyn_cast<Argument>(V))
-    if (A->hasByValAttr() || A->hasNoAliasAttr())
-      return !AddressMightEscape(V);
-  return false;
-}
-
-
-/// isObjectSmallerThan - Return true if we can prove that the object specified
-/// by V is smaller than Size.
-static bool isObjectSmallerThan(const Value *V, unsigned Size,
-                                const TargetData &TD) {
-  const Type *AccessTy = 0;
-  if (const GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
-    AccessTy = GV->getType()->getElementType();
-  
-  if (const AllocationInst *AI = dyn_cast<AllocationInst>(V))
-    if (!AI->isArrayAllocation())
-      AccessTy = AI->getType()->getElementType();
-
-  if (const Argument *A = dyn_cast<Argument>(V))
-    if (A->hasByValAttr())
-      AccessTy = cast<PointerType>(A->getType())->getElementType();
-  
-  if (AccessTy && AccessTy->isSized())
-    return TD.getABITypeSize(AccessTy) < Size;
-  return false;
-}
 
 // alias - Provide a bunch of ad-hoc rules to disambiguate in common cases, such
 // as array references.  Note that this function is heavily tail recursive.




