[llvm] [Attributor]: AApointerInfo - store the full chain of instructions that make up the access (PR #96526)
Shilei Tian via llvm-commits
llvm-commits at lists.llvm.org
Mon Jul 8 08:21:18 PDT 2024
================
@@ -5785,6 +5793,136 @@ struct AAPointerInfo : public AbstractAttribute {
AK_MUST_READ_WRITE = AK_MUST | AK_R | AK_W,
};
+ /// A helper containing a list of offsets computed for a Use. Ideally this
+ /// list should be strictly ascending, but we only enforce that when we
+ /// actually translate the list of offsets into a RangeList.
+ struct OffsetInfo {
+ using VecTy = SmallVector<AA::RangeTy>;
+ // A list of sets, parallel to Ranges, storing depth-1 predecessors per offset.
+ using OriginsTy = SmallVector<SmallPtrSet<Value *, 4>>;
+ using const_iterator = VecTy::const_iterator;
+ OriginsTy Origins;
+ VecTy Ranges;
+
+ const_iterator begin() const { return Ranges.begin(); }
+ const_iterator end() const { return Ranges.end(); }
+
+ bool operator==(const OffsetInfo &RHS) const {
+ return Ranges == RHS.Ranges && Origins == RHS.Origins;
+ }
+
+ bool operator!=(const OffsetInfo &RHS) const { return !(*this == RHS); }
+
+ void insert(AA::RangeTy Range, Value &V) {
+
+ auto *It = std::find(Ranges.begin(), Ranges.end(), Range);
+
+ // The range is already tracked; record the additional origin.
+ if (It != Ranges.end()) {
+ size_t Index = It - Ranges.begin();
+ if (Index < Origins.size())
+ Origins[Index].insert(&V);
+ } else {
+ Ranges.push_back(Range);
+ Origins.emplace_back();
+ Origins.back().insert(&V);
+ }
+ }
+
+ void setSizeAll(uint64_t Size) {
+ for (auto &Range : Ranges)
+ Range.Size = Size;
+ }
+
+ void getOnlyOffsets(SmallVector<int64_t> &Offsets) {
+
+ for (auto &Range : Ranges)
+ Offsets.push_back(Range.Offset);
+
+ // Ensure the offsets are sorted and unique.
+ sort(Offsets.begin(), Offsets.end());
+ Offsets.erase(std::unique(Offsets.begin(), Offsets.end()), Offsets.end());
+ }
+
+ bool isUnassigned() const { return Ranges.empty(); }
+
+ bool isUnknown() const {
+ if (isUnassigned())
+ return false;
+ if (Ranges.size() == 1)
+ return Ranges.front().Offset == AA::RangeTy::Unknown;
+ return false;
+ }
+
+ void setUnknown(Value &V) {
+ Ranges.clear();
+ Origins.clear();
+ insert(AA::RangeTy{AA::RangeTy::Unknown, AA::RangeTy::Unknown}, V);
+ }
+
+ void addToAll(int64_t Inc, Value &V) {
+ for (auto &Range : Ranges)
+ Range.Offset += Inc;
+
+ if (!Origins.empty()) {
+ for (auto &Origin : Origins)
+ Origin.insert(&V);
+ } else {
+ for (size_t Index = 0; Index < Ranges.size(); Index++) {
+ Origins.emplace_back();
+ Origins[Index].insert(&V);
+ }
+ }
+ }
+
+ void addToAll(int64_t Inc) {
+ for (auto &Range : Ranges)
+ Range.Offset += Inc;
+ }
+
+ /// Copy offsets from \p R into the current list.
+ ///
+ /// Ideally all lists should be strictly ascending, but we defer that to the
+ /// actual use of the list. Here we append, then sort and de-duplicate.
+ void merge(const OffsetInfo &R) {
+ Ranges.append(R.Ranges);
+ // ensure elements are unique.
+ sort(Ranges.begin(), Ranges.end());
+ Ranges.erase(std::unique(Ranges.begin(), Ranges.end()), Ranges.end());
+
+ const OriginsTy &ToBeMergedOrigins = R.Origins;
+ for (const auto &Origin : ToBeMergedOrigins)
+ Origins.emplace_back(Origin);
+ }
+
+ void mergeWithOffset(const OffsetInfo &R, Value &CurPtr) {
+
----------------
shiltian wrote:
```suggestion
```
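For context, a minimal stand-alone sketch of the Ranges/Origins bookkeeping this hunk introduces: every entry in Ranges has an index-aligned entry in Origins recording the depth-1 predecessor values that produced that offset. The RangeTy and OffsetInfoSketch types below are simplified stand-ins (standard containers instead of SmallVector/SmallPtrSet, no Unknown handling), not the actual patch code.

```cpp
#include <algorithm>
#include <cstdint>
#include <set>
#include <vector>

// Simplified stand-in for AA::RangeTy.
struct RangeTy {
  int64_t Offset;
  uint64_t Size;
  bool operator==(const RangeTy &R) const {
    return Offset == R.Offset && Size == R.Size;
  }
};

// Simplified stand-in for the patch's OffsetInfo.
struct OffsetInfoSketch {
  std::vector<RangeTy> Ranges;
  // One origin set per range, kept index-aligned with Ranges.
  std::vector<std::set<const void *>> Origins;

  void insert(RangeTy Range, const void *V) {
    auto It = std::find(Ranges.begin(), Ranges.end(), Range);
    if (It != Ranges.end()) {
      // Range already tracked: just remember one more producing value.
      Origins[It - Ranges.begin()].insert(V);
      return;
    }
    Ranges.push_back(Range);
    Origins.push_back({V});
  }

  // Shift every offset (e.g. when looking through a GEP with a constant
  // offset) and record the shifting value as an origin for each range.
  void addToAll(int64_t Inc, const void *V) {
    for (auto &Range : Ranges)
      Range.Offset += Inc;
    for (auto &Origin : Origins)
      Origin.insert(V);
  }
};

int main() {
  int A = 0, B = 0; // stand-ins for IR values
  OffsetInfoSketch OI;
  OI.insert({0, 4}, &A); // access at offset 0, size 4, produced by A
  OI.insert({8, 4}, &B); // access at offset 8, size 4, produced by B
  OI.addToAll(16, &B);   // a hypothetical GEP shifts both offsets by 16
  return static_cast<int>(OI.Ranges.size()); // 2 ranges, each with origins
}
```

The point of the sketch is the invariant that Ranges and Origins stay index-aligned, so every recorded offset can be traced back to the chain of instructions that produced it.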
https://github.com/llvm/llvm-project/pull/96526
More information about the llvm-commits mailing list