[llvm-commits] [llvm] r90956 - in /llvm/trunk: include/llvm/Analysis/MemoryDependenceAnalysis.h lib/Analysis/MemoryDependenceAnalysis.cpp lib/Transforms/Scalar/GVN.cpp
Chris Lattner
sabre at nondot.org
Tue Dec 8 23:08:01 PST 2009
Author: lattner
Date: Wed Dec 9 01:08:01 2009
New Revision: 90956
URL: http://llvm.org/viewvc/llvm-project?rev=90956&view=rev
Log:
change NonLocalDepEntry from being a typedef for a std::pair to being its
own small class. No functionality change.
Modified:
llvm/trunk/include/llvm/Analysis/MemoryDependenceAnalysis.h
llvm/trunk/lib/Analysis/MemoryDependenceAnalysis.cpp
llvm/trunk/lib/Transforms/Scalar/GVN.cpp
Modified: llvm/trunk/include/llvm/Analysis/MemoryDependenceAnalysis.h
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/include/llvm/Analysis/MemoryDependenceAnalysis.h?rev=90956&r1=90955&r2=90956&view=diff
==============================================================================
--- llvm/trunk/include/llvm/Analysis/MemoryDependenceAnalysis.h (original)
+++ llvm/trunk/include/llvm/Analysis/MemoryDependenceAnalysis.h Wed Dec 9 01:08:01 2009
@@ -131,6 +131,27 @@
}
};
+ /// NonLocalDepEntry - This is an entry in the NonLocalDepInfo cache, and an
+ /// entry in the results set for a non-local query. For each BasicBlock (the
+ /// BB entry) it keeps a MemDepResult.
+ class NonLocalDepEntry {
+ BasicBlock *BB;
+ MemDepResult Result;
+ public:
+ NonLocalDepEntry(BasicBlock *bb, MemDepResult result)
+ : BB(bb), Result(result) {}
+
+ // BB is the sort key, it can't be changed.
+ BasicBlock *getBB() const { return BB; }
+
+ const MemDepResult &getResult() const { return Result; }
+ void setResult(const MemDepResult &R) { Result = R; }
+
+ bool operator<(const NonLocalDepEntry &RHS) const {
+ return BB < RHS.BB;
+ }
+ };
+
/// MemoryDependenceAnalysis - This is an analysis that determines, for a
/// given memory operation, what preceding memory operations it depends on.
/// It builds on alias analysis information, and tries to provide a lazy,
@@ -152,7 +173,6 @@
LocalDepMapType LocalDeps;
public:
- typedef std::pair<BasicBlock*, MemDepResult> NonLocalDepEntry;
typedef std::vector<NonLocalDepEntry> NonLocalDepInfo;
private:
/// ValueIsLoadPair - This is a pair<Value*, bool> where the bool is true if
Modified: llvm/trunk/lib/Analysis/MemoryDependenceAnalysis.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Analysis/MemoryDependenceAnalysis.cpp?rev=90956&r1=90955&r2=90956&view=diff
==============================================================================
--- llvm/trunk/lib/Analysis/MemoryDependenceAnalysis.cpp (original)
+++ llvm/trunk/lib/Analysis/MemoryDependenceAnalysis.cpp Wed Dec 9 01:08:01 2009
@@ -422,7 +422,7 @@
if (Count == 0) return;
for (unsigned i = 1; i != unsigned(Count); ++i)
- assert(Cache[i-1] <= Cache[i] && "Cache isn't sorted!");
+ assert(!(Cache[i] < Cache[i-1]) && "Cache isn't sorted!");
}
#endif
@@ -463,8 +463,8 @@
// determine what is dirty, seeding our initial DirtyBlocks worklist.
for (NonLocalDepInfo::iterator I = Cache.begin(), E = Cache.end();
I != E; ++I)
- if (I->second.isDirty())
- DirtyBlocks.push_back(I->first);
+ if (I->getResult().isDirty())
+ DirtyBlocks.push_back(I->getBB());
// Sort the cache so that we can do fast binary search lookups below.
std::sort(Cache.begin(), Cache.end());
@@ -502,27 +502,27 @@
DEBUG(AssertSorted(Cache, NumSortedEntries));
NonLocalDepInfo::iterator Entry =
std::upper_bound(Cache.begin(), Cache.begin()+NumSortedEntries,
- std::make_pair(DirtyBB, MemDepResult()));
- if (Entry != Cache.begin() && prior(Entry)->first == DirtyBB)
+ NonLocalDepEntry(DirtyBB, MemDepResult()));
+ if (Entry != Cache.begin() && prior(Entry)->getBB() == DirtyBB)
--Entry;
- MemDepResult *ExistingResult = 0;
+ NonLocalDepEntry *ExistingResult = 0;
if (Entry != Cache.begin()+NumSortedEntries &&
- Entry->first == DirtyBB) {
+ Entry->getBB() == DirtyBB) {
// If we already have an entry, and if it isn't already dirty, the block
// is done.
- if (!Entry->second.isDirty())
+ if (!Entry->getResult().isDirty())
continue;
// Otherwise, remember this slot so we can update the value.
- ExistingResult = &Entry->second;
+ ExistingResult = &*Entry;
}
// If the dirty entry has a pointer, start scanning from it so we don't have
// to rescan the entire block.
BasicBlock::iterator ScanPos = DirtyBB->end();
if (ExistingResult) {
- if (Instruction *Inst = ExistingResult->getInst()) {
+ if (Instruction *Inst = ExistingResult->getResult().getInst()) {
ScanPos = Inst;
// We're removing QueryInst's use of Inst.
RemoveFromReverseMap(ReverseNonLocalDeps, Inst,
@@ -546,9 +546,9 @@
// If we had a dirty entry for the block, update it. Otherwise, just add
// a new entry.
if (ExistingResult)
- *ExistingResult = Dep;
+ ExistingResult->setResult(Dep);
else
- Cache.push_back(std::make_pair(DirtyBB, Dep));
+ Cache.push_back(NonLocalDepEntry(DirtyBB, Dep));
// If the block has a dependency (i.e. it isn't completely transparent to
// the value), remember the association!
@@ -599,8 +599,8 @@
Result, Visited, true))
return;
Result.clear();
- Result.push_back(std::make_pair(FromBB,
- MemDepResult::getClobber(FromBB->begin())));
+ Result.push_back(NonLocalDepEntry(FromBB,
+ MemDepResult::getClobber(FromBB->begin())));
}
/// GetNonLocalInfoForBlock - Compute the memdep value for BB with
@@ -616,30 +616,30 @@
// the cache set. If so, find it.
NonLocalDepInfo::iterator Entry =
std::upper_bound(Cache->begin(), Cache->begin()+NumSortedEntries,
- std::make_pair(BB, MemDepResult()));
- if (Entry != Cache->begin() && prior(Entry)->first == BB)
+ NonLocalDepEntry(BB, MemDepResult()));
+ if (Entry != Cache->begin() && (Entry-1)->getBB() == BB)
--Entry;
- MemDepResult *ExistingResult = 0;
- if (Entry != Cache->begin()+NumSortedEntries && Entry->first == BB)
- ExistingResult = &Entry->second;
+ NonLocalDepEntry *ExistingResult = 0;
+ if (Entry != Cache->begin()+NumSortedEntries && Entry->getBB() == BB)
+ ExistingResult = &*Entry;
// If we have a cached entry, and it is non-dirty, use it as the value for
// this dependency.
- if (ExistingResult && !ExistingResult->isDirty()) {
+ if (ExistingResult && !ExistingResult->getResult().isDirty()) {
++NumCacheNonLocalPtr;
- return *ExistingResult;
+ return ExistingResult->getResult();
}
// Otherwise, we have to scan for the value. If we have a dirty cache
// entry, start scanning from its position, otherwise we scan from the end
// of the block.
BasicBlock::iterator ScanPos = BB->end();
- if (ExistingResult && ExistingResult->getInst()) {
- assert(ExistingResult->getInst()->getParent() == BB &&
+ if (ExistingResult && ExistingResult->getResult().getInst()) {
+ assert(ExistingResult->getResult().getInst()->getParent() == BB &&
"Instruction invalidated?");
++NumCacheDirtyNonLocalPtr;
- ScanPos = ExistingResult->getInst();
+ ScanPos = ExistingResult->getResult().getInst();
// Eliminating the dirty entry from 'Cache', so update the reverse info.
ValueIsLoadPair CacheKey(Pointer, isLoad);
@@ -655,9 +655,9 @@
// If we had a dirty entry for the block, update it. Otherwise, just add
// a new entry.
if (ExistingResult)
- *ExistingResult = Dep;
+ ExistingResult->setResult(Dep);
else
- Cache->push_back(std::make_pair(BB, Dep));
+ Cache->push_back(NonLocalDepEntry(BB, Dep));
// If the block has a dependency (i.e. it isn't completely transparent to
// the value), remember the reverse association because we just added it
@@ -686,7 +686,7 @@
break;
case 2: {
// Two new entries, insert the last one into place.
- MemoryDependenceAnalysis::NonLocalDepEntry Val = Cache.back();
+ NonLocalDepEntry Val = Cache.back();
Cache.pop_back();
MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
std::upper_bound(Cache.begin(), Cache.end()-1, Val);
@@ -696,7 +696,7 @@
case 1:
// One new entry, Just insert the new value at the appropriate position.
if (Cache.size() != 1) {
- MemoryDependenceAnalysis::NonLocalDepEntry Val = Cache.back();
+ NonLocalDepEntry Val = Cache.back();
Cache.pop_back();
MemoryDependenceAnalysis::NonLocalDepInfo::iterator Entry =
std::upper_bound(Cache.begin(), Cache.end(), Val);
@@ -747,7 +747,7 @@
if (!Visited.empty()) {
for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
I != E; ++I) {
- DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->first);
+ DenseMap<BasicBlock*, Value*>::iterator VI = Visited.find(I->getBB());
if (VI == Visited.end() || VI->second == Pointer.getAddr())
continue;
@@ -760,8 +760,8 @@
for (NonLocalDepInfo::iterator I = Cache->begin(), E = Cache->end();
I != E; ++I) {
- Visited.insert(std::make_pair(I->first, Pointer.getAddr()));
- if (!I->second.isNonLocal())
+ Visited.insert(std::make_pair(I->getBB(), Pointer.getAddr()));
+ if (!I->getResult().isNonLocal())
Result.push_back(*I);
}
++NumCacheCompleteNonLocalPtr;
@@ -898,27 +898,27 @@
MemoryDependenceAnalysis::NonLocalDepInfo::iterator It =
std::upper_bound(Cache->begin(), Cache->end(), Entry);
- if (It != Cache->begin() && prior(It)->first == Pred)
+ if (It != Cache->begin() && (It-1)->getBB() == Pred)
--It;
- if (It == Cache->end() || It->first != Pred) {
+ if (It == Cache->end() || It->getBB() != Pred) {
Cache->insert(It, Entry);
// Add it to the reverse map.
ReverseNonLocalPtrDeps[Pred->getTerminator()].insert(CacheKey);
- } else if (!It->second.isDirty()) {
+ } else if (!It->getResult().isDirty()) {
// noop
- } else if (It->second.getInst() == Pred->getTerminator()) {
+ } else if (It->getResult().getInst() == Pred->getTerminator()) {
// Same instruction, clear the dirty marker.
- It->second = Entry.second;
- } else if (It->second.getInst() == 0) {
+ It->setResult(Entry.getResult());
+ } else if (It->getResult().getInst() == 0) {
// Dirty, with no instruction, just add this.
- It->second = Entry.second;
+ It->setResult(Entry.getResult());
ReverseNonLocalPtrDeps[Pred->getTerminator()].insert(CacheKey);
} else {
// Otherwise, dirty with a different instruction.
- RemoveFromReverseMap(ReverseNonLocalPtrDeps, It->second.getInst(),
- CacheKey);
- It->second = Entry.second;
+ RemoveFromReverseMap(ReverseNonLocalPtrDeps,
+ It->getResult().getInst(), CacheKey);
+ It->setResult(Entry.getResult());
ReverseNonLocalPtrDeps[Pred->getTerminator()].insert(CacheKey);
}
Cache = 0;
@@ -976,12 +976,12 @@
for (NonLocalDepInfo::reverse_iterator I = Cache->rbegin(); ; ++I) {
assert(I != Cache->rend() && "Didn't find current block??");
- if (I->first != BB)
+ if (I->getBB() != BB)
continue;
- assert(I->second.isNonLocal() &&
+ assert(I->getResult().isNonLocal() &&
"Should only be here with transparent block");
- I->second = MemDepResult::getClobber(BB->begin());
+ I->setResult(MemDepResult::getClobber(BB->begin()));
ReverseNonLocalPtrDeps[BB->begin()].insert(CacheKey);
Result.push_back(*I);
break;
@@ -1007,9 +1007,9 @@
NonLocalDepInfo &PInfo = It->second.second;
for (unsigned i = 0, e = PInfo.size(); i != e; ++i) {
- Instruction *Target = PInfo[i].second.getInst();
+ Instruction *Target = PInfo[i].getResult().getInst();
if (Target == 0) continue; // Ignore non-local dep results.
- assert(Target->getParent() == PInfo[i].first);
+ assert(Target->getParent() == PInfo[i].getBB());
// Eliminating the dirty entry from 'Cache', so update the reverse info.
RemoveFromReverseMap(ReverseNonLocalPtrDeps, Target, P);
@@ -1046,7 +1046,7 @@
NonLocalDepInfo &BlockMap = NLDI->second.first;
for (NonLocalDepInfo::iterator DI = BlockMap.begin(), DE = BlockMap.end();
DI != DE; ++DI)
- if (Instruction *Inst = DI->second.getInst())
+ if (Instruction *Inst = DI->getResult().getInst())
RemoveFromReverseMap(ReverseNonLocalDeps, Inst, RemInst);
NonLocalDeps.erase(NLDI);
}
@@ -1134,10 +1134,10 @@
for (NonLocalDepInfo::iterator DI = INLD.first.begin(),
DE = INLD.first.end(); DI != DE; ++DI) {
- if (DI->second.getInst() != RemInst) continue;
+ if (DI->getResult().getInst() != RemInst) continue;
// Convert to a dirty entry for the subsequent instruction.
- DI->second = NewDirtyVal;
+ DI->setResult(NewDirtyVal);
if (Instruction *NextI = NewDirtyVal.getInst())
ReverseDepsToAdd.push_back(std::make_pair(NextI, *I));
@@ -1176,10 +1176,10 @@
// Update any entries for RemInst to use the instruction after it.
for (NonLocalDepInfo::iterator DI = NLPDI.begin(), DE = NLPDI.end();
DI != DE; ++DI) {
- if (DI->second.getInst() != RemInst) continue;
+ if (DI->getResult().getInst() != RemInst) continue;
// Convert to a dirty entry for the subsequent instruction.
- DI->second = NewDirtyVal;
+ DI->setResult(NewDirtyVal);
if (Instruction *NewDirtyInst = NewDirtyVal.getInst())
ReversePtrDepsToAdd.push_back(std::make_pair(NewDirtyInst, P));
@@ -1220,7 +1220,7 @@
const NonLocalDepInfo &Val = I->second.second;
for (NonLocalDepInfo::const_iterator II = Val.begin(), E = Val.end();
II != E; ++II)
- assert(II->second.getInst() != D && "Inst occurs as NLPD value");
+ assert(II->getResult().getInst() != D && "Inst occurs as NLPD value");
}
for (NonLocalDepMapType::const_iterator I = NonLocalDeps.begin(),
@@ -1229,7 +1229,7 @@
const PerInstNLInfo &INLD = I->second;
for (NonLocalDepInfo::const_iterator II = INLD.first.begin(),
EE = INLD.first.end(); II != EE; ++II)
- assert(II->second.getInst() != D && "Inst occurs in data structures");
+ assert(II->getResult().getInst() != D && "Inst occurs in data structures");
}
for (ReverseDepMapType::const_iterator I = ReverseLocalDeps.begin(),
Modified: llvm/trunk/lib/Transforms/Scalar/GVN.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/lib/Transforms/Scalar/GVN.cpp?rev=90956&r1=90955&r2=90956&view=diff
==============================================================================
--- llvm/trunk/lib/Transforms/Scalar/GVN.cpp (original)
+++ llvm/trunk/lib/Transforms/Scalar/GVN.cpp Wed Dec 9 01:08:01 2009
@@ -491,21 +491,21 @@
// Check to see if we have a single dominating call instruction that is
// identical to C.
for (unsigned i = 0, e = deps.size(); i != e; ++i) {
- const MemoryDependenceAnalysis::NonLocalDepEntry *I = &deps[i];
+ const NonLocalDepEntry *I = &deps[i];
// Ignore non-local dependencies.
- if (I->second.isNonLocal())
+ if (I->getResult().isNonLocal())
continue;
// We don't handle non-depedencies. If we already have a call, reject
// instruction dependencies.
- if (I->second.isClobber() || cdep != 0) {
+ if (I->getResult().isClobber() || cdep != 0) {
cdep = 0;
break;
}
- CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->second.getInst());
+ CallInst *NonLocalDepCall = dyn_cast<CallInst>(I->getResult().getInst());
// FIXME: All duplicated with non-local case.
- if (NonLocalDepCall && DT->properlyDominates(I->first, C->getParent())){
+ if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())){
cdep = NonLocalDepCall;
continue;
}
@@ -1344,7 +1344,7 @@
bool GVN::processNonLocalLoad(LoadInst *LI,
SmallVectorImpl<Instruction*> &toErase) {
// Find the non-local dependencies of the load.
- SmallVector<MemoryDependenceAnalysis::NonLocalDepEntry, 64> Deps;
+ SmallVector<NonLocalDepEntry, 64> Deps;
MD->getNonLocalPointerDependency(LI->getOperand(0), true, LI->getParent(),
Deps);
//DEBUG(errs() << "INVESTIGATING NONLOCAL LOAD: "
@@ -1358,11 +1358,11 @@
// If we had a phi translation failure, we'll have a single entry which is a
// clobber in the current block. Reject this early.
- if (Deps.size() == 1 && Deps[0].second.isClobber()) {
+ if (Deps.size() == 1 && Deps[0].getResult().isClobber()) {
DEBUG(
errs() << "GVN: non-local load ";
WriteAsOperand(errs(), LI);
- errs() << " is clobbered by " << *Deps[0].second.getInst() << '\n';
+ errs() << " is clobbered by " << *Deps[0].getResult().getInst() << '\n';
);
return false;
}
@@ -1377,8 +1377,8 @@
const TargetData *TD = 0;
for (unsigned i = 0, e = Deps.size(); i != e; ++i) {
- BasicBlock *DepBB = Deps[i].first;
- MemDepResult DepInfo = Deps[i].second;
+ BasicBlock *DepBB = Deps[i].getBB();
+ MemDepResult DepInfo = Deps[i].getResult();
if (DepInfo.isClobber()) {
// If the dependence is to a store that writes to a superset of the bits
More information about the llvm-commits
mailing list