[llvm-branch-commits] [llvm] release/19.x: [llvm] Fix __builtin_object_size interaction between Negative Offset … (#111827) (PR #114786)
via llvm-branch-commits
llvm-branch-commits at lists.llvm.org
Mon Nov 4 04:55:14 PST 2024
llvmbot (https://github.com/llvmbot) created https://github.com/llvm/llvm-project/pull/114786
Backport 01a103b0b9c449e8dec17950835991757d1c4f88
Requested by: @hvdijk
From c228140e0bfddb21d24535e6c81cad00f46db749 Mon Sep 17 00:00:00 2001
From: serge-sans-paille <sguelton at mozilla.com>
Date: Sat, 2 Nov 2024 09:14:35 +0000
Subject: [PATCH] [llvm] Fix __builtin_object_size interaction between Negative Offset … (#111827)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
…and Select/Phi
When picking a SizeOffsetAPInt through combineSizeOffset, the behavior
differs depending on whether the constant offset that is about to be
applied is positive or negative: if it is positive, we need to compare
the remaining bytes (i.e. Size - Offset), but if it is negative, we need
to compare the preceding bytes (i.e. Offset).
Fix #111709
(cherry picked from commit 01a103b0b9c449e8dec17950835991757d1c4f88)
---
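As a minimal C++ sketch of the pattern this fixes (illustrative names, not the reproducer from issue #111709): a negative constant offset applied to a pointer chosen by a select/phi. With optimization enabled, the query is expected to fold to 4, mirroring the pick_negative_offset IR test added below.

    #include <cstddef>

    std::size_t bytes_after(bool flag) {
      char a[20];
      char b[20];
      char *end = flag ? a + 20 : b + 20;  // one-past-the-end of either buffer
      char *p = end - 4;                   // negative offset: 4 bytes remain after p
      return __builtin_object_size(p, 0);  // expected to fold to 4 with this fix
    }
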
llvm/include/llvm/Analysis/MemoryBuiltins.h | 71 +++--
llvm/lib/Analysis/MemoryBuiltins.cpp | 145 +++++-----
.../builtin-object-size-phi.ll | 254 ++++++++++++++++++
.../objectsize_basic.ll | 24 ++
4 files changed, 405 insertions(+), 89 deletions(-)
diff --git a/llvm/include/llvm/Analysis/MemoryBuiltins.h b/llvm/include/llvm/Analysis/MemoryBuiltins.h
index bb282a1b73d320..a21f116db7e70d 100644
--- a/llvm/include/llvm/Analysis/MemoryBuiltins.h
+++ b/llvm/include/llvm/Analysis/MemoryBuiltins.h
@@ -222,21 +222,43 @@ struct SizeOffsetAPInt : public SizeOffsetType<APInt, SizeOffsetAPInt> {
static bool known(const APInt &V) { return V.getBitWidth() > 1; }
};
+/// OffsetSpan - Used internally by \p ObjectSizeOffsetVisitor. Represents a
+/// point in memory as a pair of allocated bytes before and after it.
+struct OffsetSpan {
+ APInt Before; /// Number of allocated bytes before this point.
+ APInt After; /// Number of allocated bytes after this point.
+
+ OffsetSpan() = default;
+ OffsetSpan(APInt Before, APInt After) : Before(Before), After(After) {}
+
+ bool knownBefore() const { return known(Before); }
+ bool knownAfter() const { return known(After); }
+ bool anyKnown() const { return knownBefore() || knownAfter(); }
+ bool bothKnown() const { return knownBefore() && knownAfter(); }
+
+ bool operator==(const OffsetSpan &RHS) const {
+ return Before == RHS.Before && After == RHS.After;
+ }
+ bool operator!=(const OffsetSpan &RHS) const { return !(*this == RHS); }
+
+ static bool known(const APInt &V) { return V.getBitWidth() > 1; }
+};
+
/// Evaluate the size and offset of an object pointed to by a Value*
/// statically. Fails if size or offset are not known at compile time.
class ObjectSizeOffsetVisitor
- : public InstVisitor<ObjectSizeOffsetVisitor, SizeOffsetAPInt> {
+ : public InstVisitor<ObjectSizeOffsetVisitor, OffsetSpan> {
const DataLayout &DL;
const TargetLibraryInfo *TLI;
ObjectSizeOpts Options;
unsigned IntTyBits;
APInt Zero;
- SmallDenseMap<Instruction *, SizeOffsetAPInt, 8> SeenInsts;
+ SmallDenseMap<Instruction *, OffsetSpan, 8> SeenInsts;
unsigned InstructionsVisited;
APInt align(APInt Size, MaybeAlign Align);
- static SizeOffsetAPInt unknown() { return SizeOffsetAPInt(); }
+ static OffsetSpan unknown() { return OffsetSpan(); }
public:
ObjectSizeOffsetVisitor(const DataLayout &DL, const TargetLibraryInfo *TLI,
@@ -246,29 +268,30 @@ class ObjectSizeOffsetVisitor
// These are "private", except they can't actually be made private. Only
// compute() should be used by external users.
- SizeOffsetAPInt visitAllocaInst(AllocaInst &I);
- SizeOffsetAPInt visitArgument(Argument &A);
- SizeOffsetAPInt visitCallBase(CallBase &CB);
- SizeOffsetAPInt visitConstantPointerNull(ConstantPointerNull &);
- SizeOffsetAPInt visitExtractElementInst(ExtractElementInst &I);
- SizeOffsetAPInt visitExtractValueInst(ExtractValueInst &I);
- SizeOffsetAPInt visitGlobalAlias(GlobalAlias &GA);
- SizeOffsetAPInt visitGlobalVariable(GlobalVariable &GV);
- SizeOffsetAPInt visitIntToPtrInst(IntToPtrInst &);
- SizeOffsetAPInt visitLoadInst(LoadInst &I);
- SizeOffsetAPInt visitPHINode(PHINode &);
- SizeOffsetAPInt visitSelectInst(SelectInst &I);
- SizeOffsetAPInt visitUndefValue(UndefValue &);
- SizeOffsetAPInt visitInstruction(Instruction &I);
+ OffsetSpan visitAllocaInst(AllocaInst &I);
+ OffsetSpan visitArgument(Argument &A);
+ OffsetSpan visitCallBase(CallBase &CB);
+ OffsetSpan visitConstantPointerNull(ConstantPointerNull &);
+ OffsetSpan visitExtractElementInst(ExtractElementInst &I);
+ OffsetSpan visitExtractValueInst(ExtractValueInst &I);
+ OffsetSpan visitGlobalAlias(GlobalAlias &GA);
+ OffsetSpan visitGlobalVariable(GlobalVariable &GV);
+ OffsetSpan visitIntToPtrInst(IntToPtrInst &);
+ OffsetSpan visitLoadInst(LoadInst &I);
+ OffsetSpan visitPHINode(PHINode &);
+ OffsetSpan visitSelectInst(SelectInst &I);
+ OffsetSpan visitUndefValue(UndefValue &);
+ OffsetSpan visitInstruction(Instruction &I);
private:
- SizeOffsetAPInt findLoadSizeOffset(
- LoadInst &LoadFrom, BasicBlock &BB, BasicBlock::iterator From,
- SmallDenseMap<BasicBlock *, SizeOffsetAPInt, 8> &VisitedBlocks,
- unsigned &ScannedInstCount);
- SizeOffsetAPInt combineSizeOffset(SizeOffsetAPInt LHS, SizeOffsetAPInt RHS);
- SizeOffsetAPInt computeImpl(Value *V);
- SizeOffsetAPInt computeValue(Value *V);
+ OffsetSpan
+ findLoadOffsetRange(LoadInst &LoadFrom, BasicBlock &BB,
+ BasicBlock::iterator From,
+ SmallDenseMap<BasicBlock *, OffsetSpan, 8> &VisitedBlocks,
+ unsigned &ScannedInstCount);
+ OffsetSpan combineOffsetRange(OffsetSpan LHS, OffsetSpan RHS);
+ OffsetSpan computeImpl(Value *V);
+ OffsetSpan computeValue(Value *V);
bool CheckedZextOrTrunc(APInt &I);
};
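
As a rough, illustrative sketch of the OffsetSpan introduced in the header above (plain integers instead of APInt; SimpleSpan and the helper names are made up for this example, not part of the patch): a point inside an allocation is described by the bytes on either side of it, and the visitor derives the reported (Size, Offset) pair from that span.

    #include <cstdint>
    #include <utility>

    struct SimpleSpan {
      int64_t Before; // allocated bytes before the point
      int64_t After;  // allocated bytes after the point
    };

    // Applying a stripped constant pointer offset Off moves the point forward:
    // more bytes lie before it, fewer after it (mirrors computeImpl in the patch).
    SimpleSpan applyOffset(SimpleSpan S, int64_t Off) {
      return {S.Before + Off, S.After - Off};
    }

    // compute() finally reports Size = Before + After and Offset = Before.
    std::pair<int64_t, int64_t> toSizeOffset(SimpleSpan S) {
      return {S.Before + S.After, S.Before};
    }

For example, a pointer 20 bytes into a 20-byte alloca is {Before = 20, After = 0}; stepping it back by 4 bytes gives applyOffset({20, 0}, -4) = {16, 4}, which compute() reports as Size = 20 and Offset = 16, i.e. 4 bytes remain, matching the pick_negative_offset test below.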
diff --git a/llvm/lib/Analysis/MemoryBuiltins.cpp b/llvm/lib/Analysis/MemoryBuiltins.cpp
index 1edc51e9ce5da3..44cb2d0942f460 100644
--- a/llvm/lib/Analysis/MemoryBuiltins.cpp
+++ b/llvm/lib/Analysis/MemoryBuiltins.cpp
@@ -700,10 +700,21 @@ ObjectSizeOffsetVisitor::ObjectSizeOffsetVisitor(const DataLayout &DL,
SizeOffsetAPInt ObjectSizeOffsetVisitor::compute(Value *V) {
InstructionsVisited = 0;
- return computeImpl(V);
+ OffsetSpan Span = computeImpl(V);
+
+ // In ExactSizeFromOffset mode, we don't care about the Before Field, so allow
+ // us to overwrite it if needs be.
+ if (Span.knownAfter() && !Span.knownBefore() &&
+ Options.EvalMode == ObjectSizeOpts::Mode::ExactSizeFromOffset)
+ Span.Before = APInt::getZero(Span.After.getBitWidth());
+
+ if (!Span.bothKnown())
+ return {};
+
+ return {Span.Before + Span.After, Span.Before};
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::computeImpl(Value *V) {
+OffsetSpan ObjectSizeOffsetVisitor::computeImpl(Value *V) {
unsigned InitialIntTyBits = DL.getIndexTypeSizeInBits(V->getType());
// Stripping pointer casts can strip address space casts which can change the
@@ -720,28 +731,28 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::computeImpl(Value *V) {
IntTyBits = DL.getIndexTypeSizeInBits(V->getType());
Zero = APInt::getZero(IntTyBits);
- SizeOffsetAPInt SOT = computeValue(V);
+ OffsetSpan ORT = computeValue(V);
bool IndexTypeSizeChanged = InitialIntTyBits != IntTyBits;
if (!IndexTypeSizeChanged && Offset.isZero())
- return SOT;
+ return ORT;
// We stripped an address space cast that changed the index type size or we
// accumulated some constant offset (or both). Readjust the bit width to match
// the argument index type size and apply the offset, as required.
if (IndexTypeSizeChanged) {
- if (SOT.knownSize() && !::CheckedZextOrTrunc(SOT.Size, InitialIntTyBits))
- SOT.Size = APInt();
- if (SOT.knownOffset() &&
- !::CheckedZextOrTrunc(SOT.Offset, InitialIntTyBits))
- SOT.Offset = APInt();
+ if (ORT.knownBefore() &&
+ !::CheckedZextOrTrunc(ORT.Before, InitialIntTyBits))
+ ORT.Before = APInt();
+ if (ORT.knownAfter() && !::CheckedZextOrTrunc(ORT.After, InitialIntTyBits))
+ ORT.After = APInt();
}
- // If the computed offset is "unknown" we cannot add the stripped offset.
- return {SOT.Size,
- SOT.Offset.getBitWidth() > 1 ? SOT.Offset + Offset : SOT.Offset};
+ // If the computed bound is "unknown" we cannot add the stripped offset.
+ return {(ORT.knownBefore() ? ORT.Before + Offset : ORT.Before),
+ (ORT.knownAfter() ? ORT.After - Offset : ORT.After)};
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::computeValue(Value *V) {
+OffsetSpan ObjectSizeOffsetVisitor::computeValue(Value *V) {
if (Instruction *I = dyn_cast<Instruction>(V)) {
// If we have already seen this instruction, bail out. Cycles can happen in
// unreachable code after constant propagation.
@@ -751,7 +762,7 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::computeValue(Value *V) {
++InstructionsVisited;
if (InstructionsVisited > ObjectSizeOffsetVisitorMaxVisitInstructions)
return ObjectSizeOffsetVisitor::unknown();
- SizeOffsetAPInt Res = visit(*I);
+ OffsetSpan Res = visit(*I);
// Cache the result for later visits. If we happened to visit this during
// the above recursion, we would consider it unknown until now.
SeenInsts[I] = Res;
@@ -777,13 +788,13 @@ bool ObjectSizeOffsetVisitor::CheckedZextOrTrunc(APInt &I) {
return ::CheckedZextOrTrunc(I, IntTyBits);
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
+OffsetSpan ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
TypeSize ElemSize = DL.getTypeAllocSize(I.getAllocatedType());
if (ElemSize.isScalable() && Options.EvalMode != ObjectSizeOpts::Mode::Min)
return ObjectSizeOffsetVisitor::unknown();
APInt Size(IntTyBits, ElemSize.getKnownMinValue());
if (!I.isArrayAllocation())
- return SizeOffsetAPInt(align(Size, I.getAlign()), Zero);
+ return OffsetSpan(Zero, align(Size, I.getAlign()));
Value *ArraySize = I.getArraySize();
if (const ConstantInt *C = dyn_cast<ConstantInt>(ArraySize)) {
@@ -794,12 +805,12 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
bool Overflow;
Size = Size.umul_ov(NumElems, Overflow);
return Overflow ? ObjectSizeOffsetVisitor::unknown()
- : SizeOffsetAPInt(align(Size, I.getAlign()), Zero);
+ : OffsetSpan(Zero, align(Size, I.getAlign()));
}
return ObjectSizeOffsetVisitor::unknown();
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
+OffsetSpan ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
Type *MemoryTy = A.getPointeeInMemoryValueType();
// No interprocedural analysis is done at the moment.
if (!MemoryTy|| !MemoryTy->isSized()) {
@@ -808,16 +819,16 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
}
APInt Size(IntTyBits, DL.getTypeAllocSize(MemoryTy));
- return SizeOffsetAPInt(align(Size, A.getParamAlign()), Zero);
+ return OffsetSpan(Zero, align(Size, A.getParamAlign()));
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitCallBase(CallBase &CB) {
+OffsetSpan ObjectSizeOffsetVisitor::visitCallBase(CallBase &CB) {
if (std::optional<APInt> Size = getAllocSize(&CB, TLI))
- return SizeOffsetAPInt(*Size, Zero);
+ return OffsetSpan(Zero, *Size);
return ObjectSizeOffsetVisitor::unknown();
}
-SizeOffsetAPInt
+OffsetSpan
ObjectSizeOffsetVisitor::visitConstantPointerNull(ConstantPointerNull &CPN) {
// If null is unknown, there's nothing we can do. Additionally, non-zero
// address spaces can make use of null, so we don't presume to know anything
@@ -828,45 +839,43 @@ ObjectSizeOffsetVisitor::visitConstantPointerNull(ConstantPointerNull &CPN) {
// addrspace(1) gets casted to addrspace(0) (or vice-versa).
if (Options.NullIsUnknownSize || CPN.getType()->getAddressSpace())
return ObjectSizeOffsetVisitor::unknown();
- return SizeOffsetAPInt(Zero, Zero);
+ return OffsetSpan(Zero, Zero);
}
-SizeOffsetAPInt
+OffsetSpan
ObjectSizeOffsetVisitor::visitExtractElementInst(ExtractElementInst &) {
return ObjectSizeOffsetVisitor::unknown();
}
-SizeOffsetAPInt
-ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst &) {
+OffsetSpan ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst &) {
// Easy cases were already folded by previous passes.
return ObjectSizeOffsetVisitor::unknown();
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
+OffsetSpan ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
if (GA.isInterposable())
return ObjectSizeOffsetVisitor::unknown();
return computeImpl(GA.getAliasee());
}
-SizeOffsetAPInt
-ObjectSizeOffsetVisitor::visitGlobalVariable(GlobalVariable &GV) {
+OffsetSpan ObjectSizeOffsetVisitor::visitGlobalVariable(GlobalVariable &GV) {
if (!GV.getValueType()->isSized() || GV.hasExternalWeakLinkage() ||
((!GV.hasInitializer() || GV.isInterposable()) &&
Options.EvalMode != ObjectSizeOpts::Mode::Min))
return ObjectSizeOffsetVisitor::unknown();
APInt Size(IntTyBits, DL.getTypeAllocSize(GV.getValueType()));
- return SizeOffsetAPInt(align(Size, GV.getAlign()), Zero);
+ return OffsetSpan(Zero, align(Size, GV.getAlign()));
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitIntToPtrInst(IntToPtrInst &) {
+OffsetSpan ObjectSizeOffsetVisitor::visitIntToPtrInst(IntToPtrInst &) {
// clueless
return ObjectSizeOffsetVisitor::unknown();
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::findLoadSizeOffset(
+OffsetSpan ObjectSizeOffsetVisitor::findLoadOffsetRange(
LoadInst &Load, BasicBlock &BB, BasicBlock::iterator From,
- SmallDenseMap<BasicBlock *, SizeOffsetAPInt, 8> &VisitedBlocks,
+ SmallDenseMap<BasicBlock *, OffsetSpan, 8> &VisitedBlocks,
unsigned &ScannedInstCount) {
constexpr unsigned MaxInstsToScan = 128;
@@ -877,7 +886,7 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::findLoadSizeOffset(
auto Unknown = [&BB, &VisitedBlocks]() {
return VisitedBlocks[&BB] = ObjectSizeOffsetVisitor::unknown();
};
- auto Known = [&BB, &VisitedBlocks](SizeOffsetAPInt SO) {
+ auto Known = [&BB, &VisitedBlocks](OffsetSpan SO) {
return VisitedBlocks[&BB] = SO;
};
@@ -948,15 +957,15 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::findLoadSizeOffset(
if (!C)
return Unknown();
- return Known({C->getValue(), APInt(C->getValue().getBitWidth(), 0)});
+ return Known({APInt(C->getValue().getBitWidth(), 0), C->getValue()});
}
return Unknown();
} while (From-- != BB.begin());
- SmallVector<SizeOffsetAPInt> PredecessorSizeOffsets;
+ SmallVector<OffsetSpan> PredecessorSizeOffsets;
for (auto *PredBB : predecessors(&BB)) {
- PredecessorSizeOffsets.push_back(findLoadSizeOffset(
+ PredecessorSizeOffsets.push_back(findLoadOffsetRange(
Load, *PredBB, BasicBlock::iterator(PredBB->getTerminator()),
VisitedBlocks, ScannedInstCount));
if (!PredecessorSizeOffsets.back().bothKnown())
@@ -968,70 +977,70 @@ SizeOffsetAPInt ObjectSizeOffsetVisitor::findLoadSizeOffset(
return Known(std::accumulate(
PredecessorSizeOffsets.begin() + 1, PredecessorSizeOffsets.end(),
- PredecessorSizeOffsets.front(),
- [this](SizeOffsetAPInt LHS, SizeOffsetAPInt RHS) {
- return combineSizeOffset(LHS, RHS);
+ PredecessorSizeOffsets.front(), [this](OffsetSpan LHS, OffsetSpan RHS) {
+ return combineOffsetRange(LHS, RHS);
}));
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitLoadInst(LoadInst &LI) {
+OffsetSpan ObjectSizeOffsetVisitor::visitLoadInst(LoadInst &LI) {
if (!Options.AA) {
++ObjectVisitorLoad;
return ObjectSizeOffsetVisitor::unknown();
}
- SmallDenseMap<BasicBlock *, SizeOffsetAPInt, 8> VisitedBlocks;
+ SmallDenseMap<BasicBlock *, OffsetSpan, 8> VisitedBlocks;
unsigned ScannedInstCount = 0;
- SizeOffsetAPInt SO =
- findLoadSizeOffset(LI, *LI.getParent(), BasicBlock::iterator(LI),
- VisitedBlocks, ScannedInstCount);
+ OffsetSpan SO =
+ findLoadOffsetRange(LI, *LI.getParent(), BasicBlock::iterator(LI),
+ VisitedBlocks, ScannedInstCount);
if (!SO.bothKnown())
++ObjectVisitorLoad;
return SO;
}
-SizeOffsetAPInt
-ObjectSizeOffsetVisitor::combineSizeOffset(SizeOffsetAPInt LHS,
- SizeOffsetAPInt RHS) {
+OffsetSpan ObjectSizeOffsetVisitor::combineOffsetRange(OffsetSpan LHS,
+ OffsetSpan RHS) {
if (!LHS.bothKnown() || !RHS.bothKnown())
return ObjectSizeOffsetVisitor::unknown();
switch (Options.EvalMode) {
case ObjectSizeOpts::Mode::Min:
- return (getSizeWithOverflow(LHS).slt(getSizeWithOverflow(RHS))) ? LHS : RHS;
- case ObjectSizeOpts::Mode::Max:
- return (getSizeWithOverflow(LHS).sgt(getSizeWithOverflow(RHS))) ? LHS : RHS;
+ return {LHS.Before.slt(RHS.Before) ? LHS.Before : RHS.Before,
+ LHS.After.slt(RHS.After) ? LHS.After : RHS.After};
+ case ObjectSizeOpts::Mode::Max: {
+ return {LHS.Before.sgt(RHS.Before) ? LHS.Before : RHS.Before,
+ LHS.After.sgt(RHS.After) ? LHS.After : RHS.After};
+ }
case ObjectSizeOpts::Mode::ExactSizeFromOffset:
- return (getSizeWithOverflow(LHS).eq(getSizeWithOverflow(RHS)))
- ? LHS
- : ObjectSizeOffsetVisitor::unknown();
+ return {LHS.Before.eq(RHS.Before) ? LHS.Before : APInt(),
+ LHS.After.eq(RHS.After) ? LHS.After : APInt()};
case ObjectSizeOpts::Mode::ExactUnderlyingSizeAndOffset:
- return LHS == RHS ? LHS : ObjectSizeOffsetVisitor::unknown();
+ return (LHS == RHS) ? LHS : ObjectSizeOffsetVisitor::unknown();
}
llvm_unreachable("missing an eval mode");
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitPHINode(PHINode &PN) {
+OffsetSpan ObjectSizeOffsetVisitor::visitPHINode(PHINode &PN) {
if (PN.getNumIncomingValues() == 0)
return ObjectSizeOffsetVisitor::unknown();
auto IncomingValues = PN.incoming_values();
return std::accumulate(IncomingValues.begin() + 1, IncomingValues.end(),
computeImpl(*IncomingValues.begin()),
- [this](SizeOffsetAPInt LHS, Value *VRHS) {
- return combineSizeOffset(LHS, computeImpl(VRHS));
+ [this](OffsetSpan LHS, Value *VRHS) {
+ return combineOffsetRange(LHS, computeImpl(VRHS));
});
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitSelectInst(SelectInst &I) {
- return combineSizeOffset(computeImpl(I.getTrueValue()),
- computeImpl(I.getFalseValue()));
+OffsetSpan ObjectSizeOffsetVisitor::visitSelectInst(SelectInst &I) {
+ return combineOffsetRange(computeImpl(I.getTrueValue()),
+ computeImpl(I.getFalseValue()));
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitUndefValue(UndefValue &) {
- return SizeOffsetAPInt(Zero, Zero);
+OffsetSpan ObjectSizeOffsetVisitor::visitUndefValue(UndefValue &) {
+ return OffsetSpan(Zero, Zero);
}
-SizeOffsetAPInt ObjectSizeOffsetVisitor::visitInstruction(Instruction &I) {
+OffsetSpan ObjectSizeOffsetVisitor::visitInstruction(Instruction &I) {
LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor unknown instruction:" << I
<< '\n');
return ObjectSizeOffsetVisitor::unknown();
@@ -1084,7 +1093,13 @@ SizeOffsetValue ObjectSizeOffsetEvaluator::compute(Value *V) {
}
SizeOffsetValue ObjectSizeOffsetEvaluator::compute_(Value *V) {
- ObjectSizeOffsetVisitor Visitor(DL, TLI, Context, EvalOpts);
+
+ // Only trust ObjectSizeOffsetVisitor in exact mode, otherwise fallback on
+ // dynamic computation.
+ ObjectSizeOpts VisitorEvalOpts(EvalOpts);
+ VisitorEvalOpts.EvalMode = ObjectSizeOpts::Mode::ExactUnderlyingSizeAndOffset;
+ ObjectSizeOffsetVisitor Visitor(DL, TLI, Context, VisitorEvalOpts);
+
SizeOffsetAPInt Const = Visitor.compute(V);
if (Const.bothKnown())
return SizeOffsetValue(ConstantInt::get(Context, Const.Size),
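
The behavioural core of the change is combineOffsetRange above: instead of comparing derived sizes via getSizeWithOverflow, the Min and Max modes now merge the Before and After bounds field by field, which preserves the preceding-bytes information needed when a negative offset is applied later. A hedged sketch in plain integers (illustrative names, not the LLVM API; the real code uses signed APInt comparisons):

    #include <algorithm>
    #include <cstdint>

    struct SimpleSpan {
      int64_t Before; // allocated bytes before the point
      int64_t After;  // allocated bytes after the point
    };

    // Min/Max modes combine each bound independently.
    SimpleSpan combineMin(SimpleSpan L, SimpleSpan R) {
      return {std::min(L.Before, R.Before), std::min(L.After, R.After)};
    }

    SimpleSpan combineMax(SimpleSpan L, SimpleSpan R) {
      return {std::max(L.Before, R.Before), std::max(L.After, R.After)};
    }

ExactSizeFromOffset keeps a bound only when both sides agree on it, while ExactUnderlyingSizeAndOffset still requires the whole span to match. The ObjectSizeOffsetEvaluator::compute_ change above also pins the visitor to ExactUnderlyingSizeAndOffset mode, so partially known results fall back to the dynamic computation rather than being folded optimistically.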
diff --git a/llvm/test/Transforms/LowerConstantIntrinsics/builtin-object-size-phi.ll b/llvm/test/Transforms/LowerConstantIntrinsics/builtin-object-size-phi.ll
index 4f4d6a88e1693b..2974228e6a8303 100644
--- a/llvm/test/Transforms/LowerConstantIntrinsics/builtin-object-size-phi.ll
+++ b/llvm/test/Transforms/LowerConstantIntrinsics/builtin-object-size-phi.ll
@@ -117,3 +117,257 @@ if.end:
%size = call i64 @llvm.objectsize.i64.p0(ptr %p, i1 true, i1 true, i1 false)
ret i64 %size
}
+
+define i64 @pick_negative_offset(i32 %n) {
+; CHECK-LABEL: @pick_negative_offset(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[BUFFER0:%.*]] = alloca i8, i64 20, align 1
+; CHECK-NEXT: [[OFFSETED0:%.*]] = getelementptr i8, ptr [[BUFFER0]], i64 20
+; CHECK-NEXT: [[COND:%.*]] = icmp eq i32 [[N:%.*]], 0
+; CHECK-NEXT: br i1 [[COND]], label [[IF_ELSE:%.*]], label [[IF_END:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: [[BUFFER1:%.*]] = alloca i8, i64 20, align 1
+; CHECK-NEXT: [[OFFSETED1:%.*]] = getelementptr i8, ptr [[BUFFER1]], i64 20
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: [[P:%.*]] = phi ptr [ [[OFFSETED1]], [[IF_ELSE]] ], [ [[OFFSETED0]], [[ENTRY:%.*]] ]
+; CHECK-NEXT: [[POFFSETED:%.*]] = getelementptr i8, ptr [[P]], i64 -4
+; CHECK-NEXT: ret i64 4
+;
+entry:
+ %buffer0 = alloca i8, i64 20
+ %offseted0 = getelementptr i8, ptr %buffer0, i64 20
+ %cond = icmp eq i32 %n, 0
+ br i1 %cond, label %if.else, label %if.end
+
+if.else:
+ %buffer1 = alloca i8, i64 20
+ %offseted1 = getelementptr i8, ptr %buffer1, i64 20
+ br label %if.end
+
+if.end:
+ %p = phi ptr [ %offseted1, %if.else ], [ %offseted0, %entry ]
+ %poffseted = getelementptr i8, ptr %p, i64 -4
+ %size = call i64 @llvm.objectsize.i64.p0(ptr %poffseted, i1 false, i1 false, i1 false)
+ ret i64 %size
+}
+
+define i64 @pick_negative_offset_different_width(i32 %n) {
+; CHECK-LABEL: @pick_negative_offset_different_width(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[BUFFER0:%.*]] = alloca i8, i64 4, align 1
+; CHECK-NEXT: [[BUFFER1:%.*]] = alloca i8, i64 8, align 1
+; CHECK-NEXT: [[COND:%.*]] = icmp eq i32 [[N:%.*]], 0
+; CHECK-NEXT: br i1 [[COND]], label [[IF_ELSE:%.*]], label [[IF_END:%.*]]
+; CHECK: if.then:
+; CHECK-NEXT: [[OFFSETED0:%.*]] = getelementptr i8, ptr [[BUFFER0]], i64 1
+; CHECK-NEXT: br label [[IF_END1:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: [[OFFSETED1:%.*]] = getelementptr i8, ptr [[BUFFER1]], i64 6
+; CHECK-NEXT: br label [[IF_END1]]
+; CHECK: if.end:
+; CHECK-NEXT: [[P:%.*]] = phi ptr [ [[OFFSETED0]], [[IF_ELSE]] ], [ [[OFFSETED1]], [[IF_END]] ]
+; CHECK-NEXT: [[POFFSETED:%.*]] = getelementptr i8, ptr [[P]], i64 -2
+; CHECK-NEXT: ret i64 5
+;
+entry:
+ %buffer0 = alloca i8, i64 4
+ %buffer1 = alloca i8, i64 8
+ %cond = icmp eq i32 %n, 0
+ br i1 %cond, label %if.then, label %if.else
+
+if.then:
+ %offseted0 = getelementptr i8, ptr %buffer0, i64 1
+ br label %if.end
+
+if.else:
+ %offseted1 = getelementptr i8, ptr %buffer1, i64 6
+ br label %if.end
+
+if.end:
+ %p = phi ptr [ %offseted0, %if.then ], [ %offseted1, %if.else ]
+ %poffseted = getelementptr i8, ptr %p, i64 -2
+ %size = call i64 @llvm.objectsize.i64.p0(ptr %poffseted, i1 false, i1 false, i1 false)
+ ret i64 %size
+}
+
+define i64 @pick_negative_offset_with_nullptr(i32 %n) {
+; CHECK-LABEL: @pick_negative_offset_with_nullptr(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[BUFFER0:%.*]] = alloca i8, i64 20, align 1
+; CHECK-NEXT: [[OFFSETED0:%.*]] = getelementptr i8, ptr [[BUFFER0]], i64 20
+; CHECK-NEXT: [[COND:%.*]] = icmp eq i32 [[N:%.*]], 0
+; CHECK-NEXT: br i1 [[COND]], label [[IF_ELSE:%.*]], label [[IF_END:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: [[P0:%.*]] = phi ptr [ [[OFFSETED0]], [[ENTRY:%.*]] ], [ null, [[IF_ELSE]] ]
+; CHECK-NEXT: [[P1:%.*]] = phi ptr [ null, [[IF_ELSE]] ], [ [[OFFSETED0]], [[ENTRY]] ]
+; CHECK-NEXT: [[P0OFFSETED:%.*]] = getelementptr i8, ptr [[P0]], i64 -4
+; CHECK-NEXT: [[P1OFFSETED:%.*]] = getelementptr i8, ptr [[P1]], i64 -4
+; CHECK-NEXT: ret i64 4
+;
+entry:
+ %buffer0 = alloca i8, i64 20
+ %offseted0 = getelementptr i8, ptr %buffer0, i64 20
+ %cond = icmp eq i32 %n, 0
+ br i1 %cond, label %if.else, label %if.end
+
+if.else:
+ br label %if.end
+
+if.end:
+ %p0 = phi ptr [ %offseted0, %entry ], [ null, %if.else ]
+ %p1 = phi ptr [ null, %if.else ], [ %offseted0, %entry ]
+ %p0offseted = getelementptr i8, ptr %p0, i64 -4
+ %p1offseted = getelementptr i8, ptr %p1, i64 -4
+ %size0 = call i64 @llvm.objectsize.i64.p0(ptr %p0offseted, i1 false, i1 false, i1 false)
+ %size1 = call i64 @llvm.objectsize.i64.p0(ptr %p1offseted, i1 false, i1 false, i1 false)
+ %size = select i1 %cond, i64 %size0, i64 %size1
+ ret i64 %size
+}
+
+define i64 @pick_negative_offset_with_unsized_nullptr(i32 %n) {
+; CHECK-LABEL: @pick_negative_offset_with_unsized_nullptr(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[BUFFER0:%.*]] = alloca i8, i64 20, align 1
+; CHECK-NEXT: [[OFFSETED0:%.*]] = getelementptr i8, ptr [[BUFFER0]], i64 20
+; CHECK-NEXT: [[COND:%.*]] = icmp eq i32 [[N:%.*]], 0
+; CHECK-NEXT: br i1 [[COND]], label [[IF_ELSE:%.*]], label [[IF_END:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: [[P0:%.*]] = phi ptr [ [[OFFSETED0]], [[ENTRY:%.*]] ], [ null, [[IF_ELSE]] ]
+; CHECK-NEXT: [[P1:%.*]] = phi ptr [ null, [[IF_ELSE]] ], [ [[OFFSETED0]], [[ENTRY]] ]
+; CHECK-NEXT: [[P0OFFSETED:%.*]] = getelementptr i8, ptr [[P0]], i64 -4
+; CHECK-NEXT: [[P1OFFSETED:%.*]] = getelementptr i8, ptr [[P1]], i64 -4
+; CHECK-NEXT: ret i64 -1
+;
+entry:
+ %buffer0 = alloca i8, i64 20
+ %offseted0 = getelementptr i8, ptr %buffer0, i64 20
+ %cond = icmp eq i32 %n, 0
+ br i1 %cond, label %if.else, label %if.end
+
+if.else:
+ br label %if.end
+
+if.end:
+ %p0 = phi ptr [ %offseted0, %entry ], [ null, %if.else ]
+ %p1 = phi ptr [ null, %if.else ], [ %offseted0, %entry ]
+ %p0offseted = getelementptr i8, ptr %p0, i64 -4
+ %p1offseted = getelementptr i8, ptr %p1, i64 -4
+ %size0 = call i64 @llvm.objectsize.i64.p0(ptr %p0offseted, i1 false, i1 true, i1 false)
+ %size1 = call i64 @llvm.objectsize.i64.p0(ptr %p1offseted, i1 false, i1 true, i1 false)
+ %size = select i1 %cond, i64 %size0, i64 %size1
+ ret i64 %size
+}
+
+define i64 @chain_pick_negative_offset_with_nullptr(i32 %x) {
+; CHECK-LABEL: @chain_pick_negative_offset_with_nullptr(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[ARRAY:%.*]] = alloca [4 x i32], align 4
+; CHECK-NEXT: [[C:%.*]] = icmp eq i32 [[X:%.*]], 0
+; CHECK-NEXT: [[P:%.*]] = getelementptr i8, ptr [[ARRAY]], i64 8
+; CHECK-NEXT: [[COND:%.*]] = select i1 [[C]], ptr [[P]], ptr null
+; CHECK-NEXT: [[P4:%.*]] = getelementptr i8, ptr [[COND]], i64 8
+; CHECK-NEXT: [[COND6:%.*]] = select i1 [[C]], ptr [[P4]], ptr null
+; CHECK-NEXT: [[P7:%.*]] = getelementptr i8, ptr [[COND6]], i64 -4
+; CHECK-NEXT: ret i64 4
+;
+entry:
+ %array = alloca [4 x i32]
+ %c = icmp eq i32 %x, 0
+ %p = getelementptr i8, ptr %array, i64 8
+ %cond = select i1 %c, ptr %p, ptr null
+ %p4 = getelementptr i8, ptr %cond, i64 8
+ %cond6 = select i1 %c, ptr %p4, ptr null
+ %p7 = getelementptr i8, ptr %cond6, i64 -4
+ %size = call i64 @llvm.objectsize.i64.p0(ptr %p7, i1 false, i1 false, i1 false)
+ ret i64 %size
+}
+
+
+define i64 @negative_offset_dynamic_eval(i32 %x, i64 %i) {
+; CHECK-LABEL: @negative_offset_dynamic_eval(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[ARRAY1:%.*]] = alloca [4 x i32], align 16
+; CHECK-NEXT: [[ARRAY2:%.*]] = alloca [8 x i32], align 16
+; CHECK-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[X:%.*]], 0
+; CHECK-NEXT: br i1 [[TOBOOL_NOT]], label [[IF_ELSE:%.*]], label [[IF_THEN:%.*]]
+; CHECK: if.then:
+; CHECK-NEXT: br label [[IF_END:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: [[ADD_PTR:%.*]] = getelementptr inbounds i8, ptr [[ARRAY2]], i64 16
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: [[TMP0:%.*]] = phi i64 [ 16, [[IF_THEN]] ], [ 32, [[IF_ELSE]] ]
+; CHECK-NEXT: [[TMP5:%.*]] = phi i64 [ 0, [[IF_THEN]] ], [ 16, [[IF_ELSE]] ]
+; CHECK-NEXT: [[PTR:%.*]] = phi ptr [ [[ARRAY1]], [[IF_THEN]] ], [ [[ADD_PTR]], [[IF_ELSE]] ]
+; CHECK-NEXT: [[ADD_PTR2_IDX:%.*]] = mul i64 [[I:%.*]], 4
+; CHECK-NEXT: [[TMP6:%.*]] = add i64 [[TMP5]], [[ADD_PTR2_IDX]]
+; CHECK-NEXT: [[ADD_PTR2:%.*]] = getelementptr inbounds i32, ptr [[PTR]], i64 [[I]]
+; CHECK-NEXT: [[TMP1:%.*]] = sub i64 [[TMP0]], [[TMP6]]
+; CHECK-NEXT: [[TMP2:%.*]] = icmp ult i64 [[TMP0]], [[TMP6]]
+; CHECK-NEXT: [[TMP3:%.*]] = select i1 [[TMP2]], i64 0, i64 [[TMP1]]
+; CHECK-NEXT: [[TMP4:%.*]] = icmp ne i64 [[TMP3]], -1
+; CHECK-NEXT: call void @llvm.assume(i1 [[TMP4]])
+; CHECK-NEXT: ret i64 [[TMP3]]
+;
+entry:
+ %array1 = alloca [4 x i32], align 16
+ %array2 = alloca [8 x i32], align 16
+ %tobool.not = icmp eq i32 %x, 0
+ br i1 %tobool.not, label %if.else, label %if.then
+
+if.then:
+ br label %if.end
+
+if.else:
+ %add.ptr = getelementptr inbounds i8, ptr %array2, i64 16
+ br label %if.end
+
+if.end:
+ %ptr = phi ptr [ %array1, %if.then ], [ %add.ptr, %if.else ]
+ %add.ptr2 = getelementptr inbounds i32, ptr %ptr, i64 %i
+ %objsize = call i64 @llvm.objectsize.i64.p0(ptr %add.ptr2, i1 false, i1 true, i1 true)
+ ret i64 %objsize
+}
+
+
+define i64 @outofbound_offset_eval(i32 %x) {
+; CHECK-LABEL: @outofbound_offset_eval(
+; CHECK-NEXT: entry:
+; CHECK-NEXT: [[ARRAY:%.*]] = alloca [4 x i8], align 16
+; CHECK-NEXT: [[TOBOOL_NOT:%.*]] = icmp eq i32 [[X:%.*]], 0
+; CHECK-NEXT: br i1 [[TOBOOL_NOT]], label [[IF_ELSE:%.*]], label [[IF_THEN:%.*]]
+; CHECK: if.then:
+; CHECK-NEXT: [[ADD_PTR0:%.*]] = getelementptr i8, ptr [[ARRAY]], i64 10
+; CHECK-NEXT: br label [[IF_END:%.*]]
+; CHECK: if.else:
+; CHECK-NEXT: [[ADD_PTR1:%.*]] = getelementptr i8, ptr [[ARRAY]], i64 12
+; CHECK-NEXT: br label [[IF_END]]
+; CHECK: if.end:
+; CHECK-NEXT: [[PTR:%.*]] = phi ptr [ [[ADD_PTR0]], [[IF_THEN]] ], [ [[ADD_PTR1]], [[IF_ELSE]] ]
+; CHECK-NEXT: [[ADD_PTR2:%.*]] = getelementptr i8, ptr [[PTR]], i64 -10
+; CHECK-NEXT: ret i64 4
+;
+entry:
+ %array = alloca [4 x i8], align 16
+ %tobool.not = icmp eq i32 %x, 0
+ br i1 %tobool.not, label %if.else, label %if.then
+
+if.then:
+ %add.ptr0 = getelementptr i8, ptr %array, i64 10
+ br label %if.end
+
+if.else:
+ %add.ptr1 = getelementptr i8, ptr %array, i64 12
+ br label %if.end
+
+if.end:
+ %ptr = phi ptr [ %add.ptr0, %if.then ], [ %add.ptr1, %if.else ]
+ %add.ptr2 = getelementptr i8, ptr %ptr, i64 -10
+ %objsize = call i64 @llvm.objectsize.i64.p0(ptr %add.ptr2, i1 false, i1 false, i1 false)
+ ret i64 %objsize
+}
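
The negative_offset_dynamic_eval test above exercises the dynamic-evaluation path. A hypothetical C++ counterpart (illustrative names; assuming clang's __builtin_dynamic_object_size) would look roughly like this, where the remaining size depends on a runtime index and therefore cannot be folded to a constant:

    #include <cstddef>

    std::size_t remaining_bytes(bool flag, std::size_t i) {
      int a[4];
      int b[8];
      int *base = flag ? a : b + 4;  // b + 4 ints mirrors the 16-byte offset above
      int *p = base + i;             // runtime offset, not a compile-time constant
      return __builtin_dynamic_object_size(p, 0);  // lowered to a runtime computation
    }
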
diff --git a/llvm/test/Transforms/LowerConstantIntrinsics/objectsize_basic.ll b/llvm/test/Transforms/LowerConstantIntrinsics/objectsize_basic.ll
index c90d5152e1a096..568070a8660698 100644
--- a/llvm/test/Transforms/LowerConstantIntrinsics/objectsize_basic.ll
+++ b/llvm/test/Transforms/LowerConstantIntrinsics/objectsize_basic.ll
@@ -183,4 +183,28 @@ define i32 @promote_with_objectsize_nullunknown_true() {
ret i32 %size
}
+define i64 @out_of_bound_gep() {
+; CHECK-LABEL: @out_of_bound_gep(
+; CHECK-NEXT: [[OBJ:%.*]] = alloca i8, i32 4, align 1
+; CHECK-NEXT: [[SLIDE:%.*]] = getelementptr i8, ptr [[OBJ]], i8 8
+; CHECK-NEXT: ret i64 0
+;
+ %obj = alloca i8, i32 4
+ %slide = getelementptr i8, ptr %obj, i8 8
+ %objsize = call i64 @llvm.objectsize.i64(ptr %slide, i1 false, i1 false, i1 false)
+ ret i64 %objsize
+}
+
+define i64 @out_of_bound_negative_gep() {
+; CHECK-LABEL: @out_of_bound_negative_gep(
+; CHECK-NEXT: [[OBJ:%.*]] = alloca i8, i32 4, align 1
+; CHECK-NEXT: [[SLIDE:%.*]] = getelementptr i8, ptr [[OBJ]], i8 -8
+; CHECK-NEXT: ret i64 0
+;
+ %obj = alloca i8, i32 4
+ %slide = getelementptr i8, ptr %obj, i8 -8
+ %objsize = call i64 @llvm.objectsize.i64(ptr %slide, i1 false, i1 false, i1 false)
+ ret i64 %objsize
+}
+
declare i32 @llvm.objectsize.i32.p0(ptr, i1, i1, i1)
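
Finally, the two objectsize_basic.ll tests above pin down the out-of-bounds behaviour: once the pointer has been moved entirely outside the 4-byte allocation, in either direction, no bytes remain and the intrinsic folds to 0. A rough C++ illustration (the out-of-range pointer arithmetic is kept only to mirror the IR tests; it is not well-defined C++):

    #include <cstddef>

    std::size_t past_the_end() {
      char obj[4];
      char *slide = obj + 8;                   // 8 bytes past a 4-byte object
      return __builtin_object_size(slide, 0);  // expected to fold to 0
    }

    std::size_t before_the_start() {
      char obj[4];
      char *slide = obj - 8;                   // 8 bytes before the object
      return __builtin_object_size(slide, 0);  // expected to fold to 0
    }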