[llvm] [CodeGen] Use maps to cache lookups in MachineConstantPool::getConstantPoolIndex (PR #107135)
Weihang Fan via llvm-commits
llvm-commits at lists.llvm.org
Tue Sep 3 11:06:01 PDT 2024
https://github.com/weihangf-apple created https://github.com/llvm/llvm-project/pull/107135
`MachineConstantPool::getConstantPoolIndex` currently traverses every entry of the constant pool for each new constant being added, looking for an identical entry whose slot it can share. This can be extremely slow, especially when there are many vector constants. This PR changes the function to use a DenseMap to look up potential candidates for reuse instead. On an internal use case, backend time drops from 176s to 26s with this PR.
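In short, the per-constant linear scan becomes an amortized O(1) hash lookup keyed on the constant itself (and on its integer-folded form, handled in the patch below). A minimal, self-contained sketch of that lookup strategy, not the actual patch: `MiniConstantPool` and `SlotByKey` are made-up names, and `std::unordered_map` stands in for `DenseMap` so the snippet builds without LLVM headers.

```cpp
#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

// Simplified model of the change: instead of scanning every existing entry,
// keep a hash map from a constant's canonical key to the slot it occupies.
struct MiniConstantPool {
  std::vector<uint64_t> Entries;                    // the pooled constants
  std::unordered_map<uint64_t, unsigned> SlotByKey; // cache: key -> slot index

  unsigned getIndex(uint64_t Key) {
    if (auto It = SlotByKey.find(Key); It != SlotByKey.end())
      return It->second;                            // O(1) reuse, no scan
    Entries.push_back(Key);
    unsigned Slot = Entries.size() - 1;
    SlotByKey[Key] = Slot;
    return Slot;
  }
};

int main() {
  MiniConstantPool CP;
  std::cout << CP.getIndex(0x3f800000) << '\n'; // 0 (new entry)
  std::cout << CP.getIndex(0xdeadbeef) << '\n'; // 1 (new entry)
  std::cout << CP.getIndex(0x3f800000) << '\n'; // 0 (cache hit, slot shared)
}
```

Running it prints 0, 1, 0: the third request hits the cache and reuses slot 0 instead of rescanning the pool.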
>From e5364224bbeb766500649f56be66ebc4999f58b5 Mon Sep 17 00:00:00 2001
From: Weihang Fan <weihang_fan at apple.com>
Date: Fri, 26 Apr 2024 14:09:22 -0700
Subject: [PATCH] MachineConstantPool::getConstantPoolIndex - use maps to cache
lookups
---
.../llvm/CodeGen/MachineConstantPool.h | 6 +
llvm/lib/CodeGen/MachineFunction.cpp | 105 ++++++++++--------
2 files changed, 67 insertions(+), 44 deletions(-)
diff --git a/llvm/include/llvm/CodeGen/MachineConstantPool.h b/llvm/include/llvm/CodeGen/MachineConstantPool.h
index a9bc0ce300b22d..26a68c0a62a5d1 100644
--- a/llvm/include/llvm/CodeGen/MachineConstantPool.h
+++ b/llvm/include/llvm/CodeGen/MachineConstantPool.h
@@ -117,6 +117,8 @@ class MachineConstantPoolEntry {
class MachineConstantPool {
Align PoolAlignment; ///< The alignment for the pool.
std::vector<MachineConstantPoolEntry> Constants; ///< The pool of constants.
+ // Map from existing constant pool constants to their slot indices
+ DenseMap<const Constant *, unsigned> SharableConstants;
/// MachineConstantPoolValues that use an existing MachineConstantPoolEntry.
DenseSet<MachineConstantPoolValue*> MachineCPVsSharingEntries;
const DataLayout &DL;
@@ -133,6 +135,10 @@ class MachineConstantPool {
/// first element must be aligned.
Align getConstantPoolAlign() const { return PoolAlignment; }
+ std::optional<unsigned>
+ FindSharableConstantPoolEntry(const Constant *B, const Constant *&FoldedB,
+ const DataLayout &DL);
+
/// getConstantPoolIndex - Create a new entry in the constant pool or return
/// an existing one. User must specify the minimum required alignment for
/// the object.
diff --git a/llvm/lib/CodeGen/MachineFunction.cpp b/llvm/lib/CodeGen/MachineFunction.cpp
index ab45663436cedc..6314be0e128bf6 100644
--- a/llvm/lib/CodeGen/MachineFunction.cpp
+++ b/llvm/lib/CodeGen/MachineFunction.cpp
@@ -1428,56 +1428,70 @@ MachineConstantPool::~MachineConstantPool() {
}
}
-/// Test whether the given two constants can be allocated the same constant pool
-/// entry referenced by \param A.
-static bool CanShareConstantPoolEntry(const Constant *A, const Constant *B,
- const DataLayout &DL) {
- // Handle the trivial case quickly.
- if (A == B) return true;
-
- // If they have the same type but weren't the same constant, quickly
- // reject them.
- if (A->getType() == B->getType()) return false;
-
+static bool IsSharableConstant(const Constant *C, const DataLayout &DL) {
// We can't handle structs or arrays.
- if (isa<StructType>(A->getType()) || isa<ArrayType>(A->getType()) ||
- isa<StructType>(B->getType()) || isa<ArrayType>(B->getType()))
+ if (isa<StructType>(C->getType()) || isa<ArrayType>(C->getType()))
return false;
- // For now, only support constants with the same size.
- uint64_t StoreSize = DL.getTypeStoreSize(A->getType());
- if (StoreSize != DL.getTypeStoreSize(B->getType()) || StoreSize > 128)
+ uint64_t StoreSize = DL.getTypeStoreSize(C->getType());
+ if (StoreSize > 128)
return false;
+ return true;
+}
- bool ContainsUndefOrPoisonA = A->containsUndefOrPoisonElement();
+/// Find another constant that can be allocated the same constant pool
+/// entry referenced by \param B.
+std::optional<unsigned> MachineConstantPool::FindSharableConstantPoolEntry(
+ const Constant *B, const Constant *&FoldedB, const DataLayout &DL) {
+ // Handle the trivial case quickly.
+ if (auto It = SharableConstants.find(B); It != SharableConstants.end()) {
+ const Constant *A = Constants[It->second].Val.ConstVal;
+ if (A == B)
+ return It->second;
+ }
- Type *IntTy = IntegerType::get(A->getContext(), StoreSize*8);
+ if (!IsSharableConstant(B, DL))
+ return std::nullopt;
+
+ uint64_t StoreSize = DL.getTypeStoreSize(B->getType());
+ Type *IntTy = IntegerType::get(B->getContext(), StoreSize * 8);
// Try constant folding a bitcast of both instructions to an integer. If we
// get two identical ConstantInt's, then we are good to share them. We use
// the constant folding APIs to do this so that we get the benefit of
// DataLayout.
- if (isa<PointerType>(A->getType()))
- A = ConstantFoldCastOperand(Instruction::PtrToInt,
- const_cast<Constant *>(A), IntTy, DL);
- else if (A->getType() != IntTy)
- A = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(A),
- IntTy, DL);
if (isa<PointerType>(B->getType()))
- B = ConstantFoldCastOperand(Instruction::PtrToInt,
- const_cast<Constant *>(B), IntTy, DL);
+ FoldedB = ConstantFoldCastOperand(Instruction::PtrToInt,
+ const_cast<Constant *>(B), IntTy, DL);
else if (B->getType() != IntTy)
- B = ConstantFoldCastOperand(Instruction::BitCast, const_cast<Constant *>(B),
- IntTy, DL);
-
- if (A != B)
- return false;
+ FoldedB = ConstantFoldCastOperand(Instruction::BitCast,
+ const_cast<Constant *>(B), IntTy, DL);
+ else
+ FoldedB = B;
+
+ if (auto It = SharableConstants.find(FoldedB);
+ It != SharableConstants.end()) {
+ const Constant *A = Constants[It->second].Val.ConstVal;
+ // If they have the same type but weren't the same constant, quickly
+ // reject them.
+ if (A->getType() == B->getType())
+ return std::nullopt;
+
+ // Constants only safely match if A doesn't contain undef/poison.
+  // As we'll be reusing A, it doesn't matter if B contains undef/poison.
+ // TODO: Handle cases where A and B have the same undef/poison elements.
+ // TODO: Merge A and B with mismatching undef/poison elements.
+ if (A->containsUndefOrPoisonElement())
+ return std::nullopt;
+
+ // For now, only support constants with the same size.
+ if (StoreSize != DL.getTypeStoreSize(A->getType()))
+ return std::nullopt;
+
+ return SharableConstants[A];
+ }
- // Constants only safely match if A doesn't contain undef/poison.
- // As we'll be reusing A, it doesn't matter if B contain undef/poison.
- // TODO: Handle cases where A and B have the same undef/poison elements.
- // TODO: Merge A and B with mismatching undef/poison elements.
- return !ContainsUndefOrPoisonA;
+ return std::nullopt;
}
/// Create a new entry in the constant pool or return an existing one.
@@ -1488,16 +1502,19 @@ unsigned MachineConstantPool::getConstantPoolIndex(const Constant *C,
// Check to see if we already have this constant.
//
- // FIXME, this could be made much more efficient for large constant pools.
- for (unsigned i = 0, e = Constants.size(); i != e; ++i)
- if (!Constants[i].isMachineConstantPoolEntry() &&
- CanShareConstantPoolEntry(Constants[i].Val.ConstVal, C, DL)) {
- if (Constants[i].getAlign() < Alignment)
- Constants[i].Alignment = Alignment;
- return i;
- }
+ const Constant *FoldedC = nullptr;
+ if (auto Entry = FindSharableConstantPoolEntry(C, FoldedC, DL)) {
+ unsigned i = *Entry;
+ if (Constants[i].getAlign() < Alignment)
+ Constants[i].Alignment = Alignment;
+ return i;
+ }
Constants.push_back(MachineConstantPoolEntry(C, Alignment));
+ SharableConstants[C] = Constants.size() - 1;
+ if (IsSharableConstant(C, DL)) {
+ SharableConstants[FoldedC] = Constants.size() - 1;
+ }
return Constants.size()-1;
}
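Not part of the patch, but as a reference for reviewers: the sharing check still relies on constant-folding a bitcast/ptrtoint of the new constant to an integer of its store size, and that folded form is what the second map lookup keys on. A rough standalone sketch of why the folded value works as a map key (assumes LLVM headers and libraries are available, roughly LLVMCore plus LLVMAnalysis, and builds its own LLVMContext and a default DataLayout purely for illustration):

```cpp
#include "llvm/Analysis/ConstantFolding.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  DataLayout DL(""); // a default layout is enough for this demo

  // float 1.0 and i32 0x3f800000 have the same 4-byte store image.
  Constant *F = ConstantFP::get(Type::getFloatTy(Ctx), 1.0);
  Constant *I = ConstantInt::get(Type::getInt32Ty(Ctx), 0x3f800000);

  uint64_t StoreSize = DL.getTypeStoreSize(F->getType());
  Type *IntTy = IntegerType::get(Ctx, StoreSize * 8);

  // Same fold the patch performs before the second map lookup.
  Constant *FoldedF =
      ConstantFoldCastOperand(Instruction::BitCast, F, IntTy, DL);

  // ConstantInt is uniqued per context, so an equal folded value is
  // pointer-identical and can be used directly as a DenseMap key.
  outs() << "shared key: " << (FoldedF == I ? "yes" : "no") << "\n";
  return 0;
}
```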