[llvm] 4f2f7e8 - Analysis: Reorder code in isDereferenceableAndAlignedPointer
Matt Arsenault via llvm-commits
llvm-commits at lists.llvm.org
Fri Nov 11 16:39:02 PST 2022
Author: Matt Arsenault
Date: 2022-11-11T16:38:51-08:00
New Revision: 4f2f7e84ff6e01a3b51fa0450d522a6d35e7e726
URL: https://github.com/llvm/llvm-project/commit/4f2f7e84ff6e01a3b51fa0450d522a6d35e7e726
DIFF: https://github.com/llvm/llvm-project/commit/4f2f7e84ff6e01a3b51fa0450d522a6d35e7e726.diff
LOG: Analysis: Reorder code in isDereferenceableAndAlignedPointer
GEPs should be the most common and basic case, so try that first.
Added:
Modified:
llvm/lib/Analysis/Loads.cpp
Removed:
################################################################################
diff --git a/llvm/lib/Analysis/Loads.cpp b/llvm/lib/Analysis/Loads.cpp
index bc16c00c53206..24c4cff8a8ec5 100644
--- a/llvm/lib/Analysis/Loads.cpp
+++ b/llvm/lib/Analysis/Loads.cpp
@@ -54,6 +54,37 @@ static bool isDereferenceableAndAlignedPointer(
// Note that it is not safe to speculate into a malloc'd region because
// malloc may return null.
+ // For GEPs, determine if the indexing lands within the allocated object.
+ if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
+ const Value *Base = GEP->getPointerOperand();
+
+ APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
+ if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
+ !Offset.urem(APInt(Offset.getBitWidth(), Alignment.value()))
+ .isMinValue())
+ return false;
+
+ // If the base pointer is dereferenceable for Offset+Size bytes, then the
+ // GEP (== Base + Offset) is dereferenceable for Size bytes. If the base
+ // pointer is aligned to Align bytes, and the Offset is divisible by Align
+ // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
+ // aligned to Align bytes.
+
+ // Offset and Size may have different bit widths if we have visited an
+ // addrspacecast, so we can't do arithmetic directly on the APInt values.
+ return isDereferenceableAndAlignedPointer(
+ Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,
+ CtxI, AC, DT, TLI, Visited, MaxDepth);
+ }
+
+ // bitcast instructions are no-ops as far as dereferenceability is concerned.
+ if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
+ if (BC->getSrcTy()->isPointerTy())
+ return isDereferenceableAndAlignedPointer(
+ BC->getOperand(0), Alignment, Size, DL, CtxI, AC, DT, TLI,
+ Visited, MaxDepth);
+ }
+
// Recurse into both hands of select.
if (const SelectInst *Sel = dyn_cast<SelectInst>(V)) {
return isDereferenceableAndAlignedPointer(Sel->getTrueValue(), Alignment,
@@ -64,14 +95,6 @@ static bool isDereferenceableAndAlignedPointer(
Visited, MaxDepth);
}
- // bitcast instructions are no-ops as far as dereferenceability is concerned.
- if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
- if (BC->getSrcTy()->isPointerTy())
- return isDereferenceableAndAlignedPointer(BC->getOperand(0), Alignment,
- Size, DL, CtxI, AC, DT, TLI,
- Visited, MaxDepth);
- }
-
bool CheckForNonNull, CheckForFreed;
APInt KnownDerefBytes(Size.getBitWidth(),
V->getPointerDereferenceableBytes(DL, CheckForNonNull,
@@ -86,64 +109,9 @@ static bool isDereferenceableAndAlignedPointer(
return isAligned(V, Offset, Alignment, DL);
}
- if (CtxI) {
- /// Look through assumes to see if both dereferencability and alignment can
- /// be provent by an assume
- RetainedKnowledge AlignRK;
- RetainedKnowledge DerefRK;
- if (getKnowledgeForValue(
- V, {Attribute::Dereferenceable, Attribute::Alignment}, AC,
- [&](RetainedKnowledge RK, Instruction *Assume, auto) {
- if (!isValidAssumeForContext(Assume, CtxI))
- return false;
- if (RK.AttrKind == Attribute::Alignment)
- AlignRK = std::max(AlignRK, RK);
- if (RK.AttrKind == Attribute::Dereferenceable)
- DerefRK = std::max(DerefRK, RK);
- if (AlignRK && DerefRK && AlignRK.ArgValue >= Alignment.value() &&
- DerefRK.ArgValue >= Size.getZExtValue())
- return true; // We have found what we needed so we stop looking
- return false; // Other assumes may have better information. so
- // keep looking
- }))
- return true;
- }
/// TODO refactor this function to be able to search independently for
/// Dereferencability and Alignment requirements.
- // For GEPs, determine if the indexing lands within the allocated object.
- if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
- const Value *Base = GEP->getPointerOperand();
-
- APInt Offset(DL.getIndexTypeSizeInBits(GEP->getType()), 0);
- if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
- !Offset.urem(APInt(Offset.getBitWidth(), Alignment.value()))
- .isMinValue())
- return false;
-
- // If the base pointer is dereferenceable for Offset+Size bytes, then the
- // GEP (== Base + Offset) is dereferenceable for Size bytes. If the base
- // pointer is aligned to Align bytes, and the Offset is divisible by Align
- // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
- // aligned to Align bytes.
-
- // Offset and Size may have different bit widths if we have visited an
- // addrspacecast, so we can't do arithmetic directly on the APInt values.
- return isDereferenceableAndAlignedPointer(
- Base, Alignment, Offset + Size.sextOrTrunc(Offset.getBitWidth()), DL,
- CtxI, AC, DT, TLI, Visited, MaxDepth);
- }
-
- // For gc.relocate, look through relocations
- if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
- return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
- Alignment, Size, DL, CtxI, AC, DT,
- TLI, Visited, MaxDepth);
-
- if (const AddrSpaceCastOperator *ASC = dyn_cast<AddrSpaceCastOperator>(V))
- return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
- Size, DL, CtxI, AC, DT, TLI,
- Visited, MaxDepth);
if (const auto *Call = dyn_cast<CallBase>(V)) {
if (auto *RP = getArgumentAliasingToReturnedPointer(Call, true))
@@ -178,6 +146,40 @@ static bool isDereferenceableAndAlignedPointer(
}
}
+ // For gc.relocate, look through relocations
+ if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
+ return isDereferenceableAndAlignedPointer(RelocateInst->getDerivedPtr(),
+ Alignment, Size, DL, CtxI, AC, DT,
+ TLI, Visited, MaxDepth);
+
+ if (const AddrSpaceCastOperator *ASC = dyn_cast<AddrSpaceCastOperator>(V))
+ return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Alignment,
+ Size, DL, CtxI, AC, DT, TLI,
+ Visited, MaxDepth);
+
+ if (CtxI) {
+ /// Look through assumes to see if both dereferencability and alignment can
+ /// be provent by an assume
+ RetainedKnowledge AlignRK;
+ RetainedKnowledge DerefRK;
+ if (getKnowledgeForValue(
+ V, {Attribute::Dereferenceable, Attribute::Alignment}, AC,
+ [&](RetainedKnowledge RK, Instruction *Assume, auto) {
+ if (!isValidAssumeForContext(Assume, CtxI))
+ return false;
+ if (RK.AttrKind == Attribute::Alignment)
+ AlignRK = std::max(AlignRK, RK);
+ if (RK.AttrKind == Attribute::Dereferenceable)
+ DerefRK = std::max(DerefRK, RK);
+ if (AlignRK && DerefRK && AlignRK.ArgValue >= Alignment.value() &&
+ DerefRK.ArgValue >= Size.getZExtValue())
+ return true; // We have found what we needed so we stop looking
+ return false; // Other assumes may have better information. so
+ // keep looking
+ }))
+ return true;
+ }
+
// If we don't know, assume the worst.
return false;
}
More information about the llvm-commits
mailing list