[llvm] [InstrProf] Adding utility weights to BalancedPartitioning (PR #72717)
via llvm-commits
llvm-commits at lists.llvm.org
Fri Jan 19 13:36:59 PST 2024
https://github.com/spupyrev updated https://github.com/llvm/llvm-project/pull/72717
From 1bc2dc5978f9bdf375d470c754aed822cf570593 Mon Sep 17 00:00:00 2001
From: spupyrev <spupyrev at fb.com>
Date: Fri, 17 Nov 2023 14:51:58 -0800
Subject: [PATCH 1/5] [InstrProf] Adding utility weights to
BalancedPartitioning
---
.../llvm/Support/BalancedPartitioning.h | 12 +++-
llvm/lib/ProfileData/InstrProf.cpp | 15 +++--
llvm/lib/Support/BalancedPartitioning.cpp | 35 +++++++----
.../Support/BalancedPartitioningTest.cpp | 61 +++++++++++++++++++
4 files changed, 106 insertions(+), 17 deletions(-)
diff --git a/llvm/include/llvm/Support/BalancedPartitioning.h b/llvm/include/llvm/Support/BalancedPartitioning.h
index a8464ac0fe60e5..11d085badc10a0 100644
--- a/llvm/include/llvm/Support/BalancedPartitioning.h
+++ b/llvm/include/llvm/Support/BalancedPartitioning.h
@@ -57,8 +57,14 @@ class BPFunctionNode {
friend class BalancedPartitioning;
public:
+ /// The type of ID
using IDT = uint64_t;
- using UtilityNodeT = uint32_t;
+ /// The type of UtilityNode
+ struct UtilityNodeT {
+ UtilityNodeT(uint32_t id, uint32_t weight = 1) : id(id), weight(weight) {}
+ uint32_t id;
+ uint32_t weight;
+ };
/// \param UtilityNodes the set of utility nodes (must be unique'd)
BPFunctionNode(IDT Id, ArrayRef<UtilityNodeT> UtilityNodes)
@@ -69,6 +75,8 @@ class BPFunctionNode {
void dump(raw_ostream &OS) const;
+ std::optional<unsigned> getBucket() const { return Bucket; };
+
protected:
/// The list of utility nodes associated with this node
SmallVector<UtilityNodeT, 4> UtilityNodes;
@@ -188,6 +196,8 @@ class BalancedPartitioning {
float CachedGainRL;
/// Whether \p CachedGainLR and \p CachedGainRL are valid
bool CachedGainIsValid = false;
+ /// The weight of this utility node
+ uint32_t Weight = 1;
};
protected:
diff --git a/llvm/lib/ProfileData/InstrProf.cpp b/llvm/lib/ProfileData/InstrProf.cpp
index 2640027455e0da..2488105a4ec589 100644
--- a/llvm/lib/ProfileData/InstrProf.cpp
+++ b/llvm/lib/ProfileData/InstrProf.cpp
@@ -923,10 +923,10 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
const int N = Log2_64(LargestTraceSize) + 1;
- // TODO: We need to use the Trace.Weight field to give more weight to more
+ // TODO: We may use the Trace.Weight field to give more weight to more
// important utilities
DenseMap<IDT, SmallVector<UtilityNodeT, 4>> FuncGroups;
- for (size_t TraceIdx = 0; TraceIdx < Traces.size(); TraceIdx++) {
+ for (uint32_t TraceIdx = 0; TraceIdx < Traces.size(); TraceIdx++) {
auto &Trace = Traces[TraceIdx].FunctionNameRefs;
for (size_t Timestamp = 0; Timestamp < Trace.size(); Timestamp++) {
for (int I = Log2_64(Timestamp + 1); I < N; I++) {
@@ -940,8 +940,15 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
std::vector<BPFunctionNode> Nodes;
for (auto Id : FunctionIds) {
auto &UNs = FuncGroups[Id];
- llvm::sort(UNs);
- UNs.erase(std::unique(UNs.begin(), UNs.end()), UNs.end());
+ llvm::sort(UNs.begin(), UNs.end(),
+ [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.id < R.id;
+ });
+ UNs.erase(std::unique(UNs.begin(), UNs.end(),
+ [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.id == R.id;
+ }),
+ UNs.end());
Nodes.emplace_back(Id, UNs);
}
return Nodes;
diff --git a/llvm/lib/Support/BalancedPartitioning.cpp b/llvm/lib/Support/BalancedPartitioning.cpp
index cb6ba61179941f..45c120263d3adf 100644
--- a/llvm/lib/Support/BalancedPartitioning.cpp
+++ b/llvm/lib/Support/BalancedPartitioning.cpp
@@ -20,6 +20,15 @@
using namespace llvm;
#define DEBUG_TYPE "balanced-partitioning"
+namespace llvm {
+template <> struct format_provider<BPFunctionNode::UtilityNodeT> {
+ static void format(const BPFunctionNode::UtilityNodeT &V, raw_ostream &OS,
+ StringRef Style) {
+ OS << "(" << V.id << "-" << V.weight << ")";
+ }
+};
+} // namespace llvm
+
void BPFunctionNode::dump(raw_ostream &OS) const {
OS << formatv("{{ID={0} Utilities={{{1:$[,]}} Bucket={2}}", Id,
make_range(UtilityNodes.begin(), UtilityNodes.end()), Bucket);
@@ -188,12 +197,12 @@ void BalancedPartitioning::runIterations(const FunctionNodeRange Nodes,
SignaturesT Signatures(/*Size=*/UtilityNodeIndex.size());
for (auto &N : Nodes) {
for (auto &UN : N.UtilityNodes) {
- assert(UN < Signatures.size());
- if (N.Bucket == LeftBucket) {
- Signatures[UN].LeftCount++;
- } else {
- Signatures[UN].RightCount++;
- }
+ assert(UN.id < Signatures.size());
+ if (N.Bucket == LeftBucket)
+ Signatures[UN.id].LeftCount++;
+ else
+ Signatures[UN.id].RightCount++;
+ Signatures[UN.id].Weight = UN.weight;
}
}
@@ -221,9 +230,11 @@ unsigned BalancedPartitioning::runIteration(const FunctionNodeRange Nodes,
Signature.CachedGainLR = 0.f;
Signature.CachedGainRL = 0.f;
if (L > 0)
- Signature.CachedGainLR = Cost - logCost(L - 1, R + 1);
+ Signature.CachedGainLR =
+ (Cost - logCost(L - 1, R + 1)) * Signature.Weight;
if (R > 0)
- Signature.CachedGainRL = Cost - logCost(L + 1, R - 1);
+ Signature.CachedGainRL =
+ (Cost - logCost(L + 1, R - 1)) * Signature.Weight;
Signature.CachedGainIsValid = true;
}
@@ -282,14 +293,14 @@ bool BalancedPartitioning::moveFunctionNode(BPFunctionNode &N,
// Update signatures and invalidate gain cache
if (FromLeftToRight) {
for (auto &UN : N.UtilityNodes) {
- auto &Signature = Signatures[UN];
+ auto &Signature = Signatures[UN.id];
Signature.LeftCount--;
Signature.RightCount++;
Signature.CachedGainIsValid = false;
}
} else {
for (auto &UN : N.UtilityNodes) {
- auto &Signature = Signatures[UN];
+ auto &Signature = Signatures[UN.id];
Signature.LeftCount++;
Signature.RightCount--;
Signature.CachedGainIsValid = false;
@@ -318,8 +329,8 @@ float BalancedPartitioning::moveGain(const BPFunctionNode &N,
const SignaturesT &Signatures) {
float Gain = 0.f;
for (auto &UN : N.UtilityNodes)
- Gain += (FromLeftToRight ? Signatures[UN].CachedGainLR
- : Signatures[UN].CachedGainRL);
+ Gain += (FromLeftToRight ? Signatures[UN.id].CachedGainLR
+ : Signatures[UN.id].CachedGainRL);
return Gain;
}
diff --git a/llvm/unittests/Support/BalancedPartitioningTest.cpp b/llvm/unittests/Support/BalancedPartitioningTest.cpp
index ebe518a8e89cac..aac7e6edc92181 100644
--- a/llvm/unittests/Support/BalancedPartitioningTest.cpp
+++ b/llvm/unittests/Support/BalancedPartitioningTest.cpp
@@ -7,6 +7,7 @@
//===----------------------------------------------------------------------===//
#include "llvm/Support/BalancedPartitioning.h"
+#include "llvm/Support/MathExtras.h"
#include "llvm/Testing/Support/SupportHelpers.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
@@ -37,6 +38,14 @@ class BalancedPartitioningTest : public ::testing::Test {
Ids.push_back(N.Id);
return Ids;
}
+
+ static std::vector<std::pair<BPFunctionNode::IDT, uint32_t>>
+ getBuckets(std::vector<BPFunctionNode> &Nodes) {
+ std::vector<std::pair<BPFunctionNode::IDT, uint32_t>> Res;
+ for (auto &N : Nodes)
+ Res.emplace_back(std::make_pair(N.Id, *N.getBucket()));
+ return Res;
+ }
};
TEST_F(BalancedPartitioningTest, Basic) {
@@ -97,4 +106,56 @@ TEST_F(BalancedPartitioningTest, MoveGain) {
30.f);
}
+TEST_F(BalancedPartitioningTest, Weight1) {
+ std::vector<BPFunctionNode::UtilityNodeT> UNs = {
+ BPFunctionNode::UtilityNodeT(0, 100),
+ BPFunctionNode::UtilityNodeT(1, 100),
+ BPFunctionNode::UtilityNodeT(2, 100),
+ BPFunctionNode::UtilityNodeT(3, 1),
+ BPFunctionNode::UtilityNodeT(4, 1),
+ };
+ std::vector<BPFunctionNode> Nodes = {
+ BPFunctionNode(0, {UNs[0], UNs[3]}), BPFunctionNode(1, {UNs[1], UNs[3]}),
+ BPFunctionNode(2, {UNs[2], UNs[3]}), BPFunctionNode(3, {UNs[0], UNs[4]}),
+ BPFunctionNode(4, {UNs[1], UNs[4]}), BPFunctionNode(5, {UNs[2], UNs[4]}),
+ };
+
+ Bp.run(Nodes);
+
+ // Verify that the created groups are [(0,3) -- (1,4) -- (2,5)].
+ auto Res = getBuckets(Nodes);
+ llvm::sort(Res);
+ EXPECT_THAT(AbsoluteDifference(Res[0].second, Res[3].second), 1);
+ EXPECT_THAT(AbsoluteDifference(Res[1].second, Res[4].second), 1);
+ EXPECT_THAT(AbsoluteDifference(Res[2].second, Res[5].second), 1);
+}
+
+TEST_F(BalancedPartitioningTest, Weight2) {
+ std::vector<BPFunctionNode::UtilityNodeT> UNs = {
+ BPFunctionNode::UtilityNodeT(0, 1),
+ BPFunctionNode::UtilityNodeT(1, 1),
+ BPFunctionNode::UtilityNodeT(2, 1),
+ BPFunctionNode::UtilityNodeT(3, 100),
+ BPFunctionNode::UtilityNodeT(4, 100),
+ };
+ std::vector<BPFunctionNode> Nodes = {
+ BPFunctionNode(0, {UNs[0], UNs[3]}), BPFunctionNode(1, {UNs[1], UNs[4]}),
+ BPFunctionNode(2, {UNs[2], UNs[3]}), BPFunctionNode(3, {UNs[0], UNs[4]}),
+ BPFunctionNode(4, {UNs[1], UNs[3]}), BPFunctionNode(5, {UNs[2], UNs[4]}),
+ };
+
+ Bp.run(Nodes);
+
+ // Verify that the created groups are [(0,2,4) -- (1,3,5)].
+ auto Res = getBuckets(Nodes);
+ llvm::sort(Res);
+ EXPECT_LE(AbsoluteDifference(Res[0].second, Res[2].second), 2u);
+ EXPECT_LE(AbsoluteDifference(Res[0].second, Res[4].second), 2u);
+ EXPECT_LE(AbsoluteDifference(Res[2].second, Res[4].second), 2u);
+
+ EXPECT_LE(AbsoluteDifference(Res[1].second, Res[3].second), 2u);
+ EXPECT_LE(AbsoluteDifference(Res[1].second, Res[5].second), 2u);
+ EXPECT_LE(AbsoluteDifference(Res[3].second, Res[5].second), 2u);
+}
+
} // end namespace llvm
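
For reference, the effect of the new Weight field on the optimization: in runIteration the cached move gains are now scaled by the utility node's weight, so heavily weighted utilities dominate the objective. Below is a minimal standalone sketch of that scaling in plain C++ (logCost here is a simplified stand-in, not the cached implementation in BalancedPartitioning.cpp):

#include <cmath>
#include <cstdint>
#include <iostream>

// Simplified stand-in for the log-based cost of a utility node whose
// functions are split L / R across the two buckets.
static double logCost(unsigned L, unsigned R) {
  return -(L * std::log2(L + 1) + R * std::log2(R + 1));
}

int main() {
  unsigned L = 8, R = 2;  // functions of this utility node on each side
  uint32_t Weight = 100;  // utility-node weight; the old behavior is Weight == 1
  double Cost = logCost(L, R);
  // As in runIteration, the gain of moving one function across the split
  // is amplified by the utility node's weight.
  double GainLR = (Cost - logCost(L - 1, R + 1)) * Weight;
  double GainRL = (Cost - logCost(L + 1, R - 1)) * Weight;
  std::cout << "GainLR=" << GainLR << " GainRL=" << GainRL << "\n";
  return 0;
}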
From c5b47b273ec3d667d27e9843acc1d5a394e02263 Mon Sep 17 00:00:00 2001
From: spupyrev <spupyrev at fb.com>
Date: Mon, 20 Nov 2023 11:52:36 -0800
Subject: [PATCH 2/5] addressing review
---
llvm/lib/ProfileData/InstrProf.cpp | 13 +++++++++----
llvm/lib/Support/BalancedPartitioning.cpp | 8 +++++++-
2 files changed, 16 insertions(+), 5 deletions(-)
diff --git a/llvm/lib/ProfileData/InstrProf.cpp b/llvm/lib/ProfileData/InstrProf.cpp
index 2488105a4ec589..d1233385ea87db 100644
--- a/llvm/lib/ProfileData/InstrProf.cpp
+++ b/llvm/lib/ProfileData/InstrProf.cpp
@@ -929,9 +929,15 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
for (uint32_t TraceIdx = 0; TraceIdx < Traces.size(); TraceIdx++) {
auto &Trace = Traces[TraceIdx].FunctionNameRefs;
for (size_t Timestamp = 0; Timestamp < Trace.size(); Timestamp++) {
+<<<<<<< HEAD
for (int I = Log2_64(Timestamp + 1); I < N; I++) {
auto FunctionId = Trace[Timestamp];
UtilityNodeT GroupId = TraceIdx * N + I;
+=======
+ for (int I = std::floor(std::log2(Timestamp + 1)); I < N; I++) {
+ auto &FunctionId = Trace[Timestamp];
+ UtilityNodeT GroupId(TraceIdx * N + I);
+>>>>>>> 0c3051fa80e6 (addressing review)
FuncGroups[FunctionId].push_back(GroupId);
}
}
@@ -940,10 +946,9 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
std::vector<BPFunctionNode> Nodes;
for (auto Id : FunctionIds) {
auto &UNs = FuncGroups[Id];
- llvm::sort(UNs.begin(), UNs.end(),
- [](const UtilityNodeT &L, const UtilityNodeT &R) {
- return L.id < R.id;
- });
+ llvm::sort(UNs, [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.id < R.id;
+ });
UNs.erase(std::unique(UNs.begin(), UNs.end(),
[](const UtilityNodeT &L, const UtilityNodeT &R) {
return L.id == R.id;
diff --git a/llvm/lib/Support/BalancedPartitioning.cpp b/llvm/lib/Support/BalancedPartitioning.cpp
index 45c120263d3adf..06751ea757c0c4 100644
--- a/llvm/lib/Support/BalancedPartitioning.cpp
+++ b/llvm/lib/Support/BalancedPartitioning.cpp
@@ -202,7 +202,13 @@ void BalancedPartitioning::runIterations(const FunctionNodeRange Nodes,
Signatures[UN.id].LeftCount++;
else
Signatures[UN.id].RightCount++;
- Signatures[UN.id].Weight = UN.weight;
+ // Identical utility nodes (having the same UN.id) are guaranteed to have
+ // the same weight; thus, we can get a new weight only when the signature
+ // is uninitialized.
+ if (Signatures[UN.id].Weight != UN.weight) {
+ assert(Signatures[UN.id].Weight == 1);
+ Signatures[UN.id].Weight = UN.weight;
+ }
}
}
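
A small standalone illustration of the guard added above (standard C++ with a hypothetical Signature struct): a signature's weight starts at the default of 1 and is overwritten at most once, and the assert fires if two utility nodes share an id but disagree on weight.

#include <cassert>
#include <cstdint>
#include <vector>

struct Signature {
  uint32_t Weight = 1; // default weight
};

struct UtilNode {
  uint32_t Id, Weight;
};

int main() {
  std::vector<Signature> Signatures(4);
  std::vector<UtilNode> UNs = {{2, 100}, {2, 100}, {3, 1}};
  for (const auto &UN : UNs) {
    if (Signatures[UN.Id].Weight != UN.Weight) {
      // A second, conflicting weight for the same id would trip this assert.
      assert(Signatures[UN.Id].Weight == 1 && "conflicting weights for one id");
      Signatures[UN.Id].Weight = UN.Weight;
    }
  }
  return 0;
}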
From aa5b01d6cb1a52154597133a6e9415e63e6307c6 Mon Sep 17 00:00:00 2001
From: spupyrev <spupyrev at fb.com>
Date: Thu, 18 Jan 2024 07:22:50 -0800
Subject: [PATCH 3/5] rebase
---
.../llvm/Support/BalancedPartitioning.h | 5 +-
llvm/lib/ProfileData/InstrProf.cpp | 6 --
llvm/lib/Support/BalancedPartitioning.cpp | 19 ++++--
.../ProfileData/BPFunctionNodeTest.cpp | 30 ++++++----
.../Support/BalancedPartitioningTest.cpp | 60 +++++++++----------
5 files changed, 61 insertions(+), 59 deletions(-)
diff --git a/llvm/include/llvm/Support/BalancedPartitioning.h b/llvm/include/llvm/Support/BalancedPartitioning.h
index 11d085badc10a0..c00f7b1d2b6434 100644
--- a/llvm/include/llvm/Support/BalancedPartitioning.h
+++ b/llvm/include/llvm/Support/BalancedPartitioning.h
@@ -75,8 +75,6 @@ class BPFunctionNode {
void dump(raw_ostream &OS) const;
- std::optional<unsigned> getBucket() const { return Bucket; };
-
protected:
/// The list of utility nodes associated with this node
SmallVector<UtilityNodeT, 4> UtilityNodes;
@@ -86,8 +84,7 @@ class BPFunctionNode {
uint64_t InputOrderIndex = 0;
friend class BPFunctionNodeTest_Basic_Test;
- friend class BalancedPartitioningTest_Basic_Test;
- friend class BalancedPartitioningTest_Large_Test;
+ friend class BalancedPartitioningTest;
};
/// Algorithm parameters; default values are tuned on real-world binaries
diff --git a/llvm/lib/ProfileData/InstrProf.cpp b/llvm/lib/ProfileData/InstrProf.cpp
index d1233385ea87db..f0c2ffb1de2d0e 100644
--- a/llvm/lib/ProfileData/InstrProf.cpp
+++ b/llvm/lib/ProfileData/InstrProf.cpp
@@ -929,15 +929,9 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
for (uint32_t TraceIdx = 0; TraceIdx < Traces.size(); TraceIdx++) {
auto &Trace = Traces[TraceIdx].FunctionNameRefs;
for (size_t Timestamp = 0; Timestamp < Trace.size(); Timestamp++) {
-<<<<<<< HEAD
for (int I = Log2_64(Timestamp + 1); I < N; I++) {
auto FunctionId = Trace[Timestamp];
- UtilityNodeT GroupId = TraceIdx * N + I;
-=======
- for (int I = std::floor(std::log2(Timestamp + 1)); I < N; I++) {
- auto &FunctionId = Trace[Timestamp];
UtilityNodeT GroupId(TraceIdx * N + I);
->>>>>>> 0c3051fa80e6 (addressing review)
FuncGroups[FunctionId].push_back(GroupId);
}
}
diff --git a/llvm/lib/Support/BalancedPartitioning.cpp b/llvm/lib/Support/BalancedPartitioning.cpp
index 06751ea757c0c4..a9733bb2844a3f 100644
--- a/llvm/lib/Support/BalancedPartitioning.cpp
+++ b/llvm/lib/Support/BalancedPartitioning.cpp
@@ -30,8 +30,13 @@ template <> struct format_provider<BPFunctionNode::UtilityNodeT> {
} // namespace llvm
void BPFunctionNode::dump(raw_ostream &OS) const {
- OS << formatv("{{ID={0} Utilities={{{1:$[,]}} Bucket={2}}", Id,
- make_range(UtilityNodes.begin(), UtilityNodes.end()), Bucket);
+ OS << "{ID=" << Id << " Utilities={";
+ for (auto &N : UtilityNodes)
+ OS << N.id << " ,";
+ OS << "}";
+ if (Bucket.has_value())
+ OS << " Bucket=" << Bucket.value();
+ OS << "}";
}
template <typename Func>
@@ -176,22 +181,24 @@ void BalancedPartitioning::runIterations(const FunctionNodeRange Nodes,
unsigned RightBucket,
std::mt19937 &RNG) const {
unsigned NumNodes = std::distance(Nodes.begin(), Nodes.end());
- DenseMap<BPFunctionNode::UtilityNodeT, unsigned> UtilityNodeIndex;
+ DenseMap<uint32_t, unsigned> UtilityNodeIndex;
for (auto &N : Nodes)
for (auto &UN : N.UtilityNodes)
- ++UtilityNodeIndex[UN];
+ ++UtilityNodeIndex[UN.id];
// Remove utility nodes if they have just one edge or are connected to all
// functions
for (auto &N : Nodes)
llvm::erase_if(N.UtilityNodes, [&](auto &UN) {
- return UtilityNodeIndex[UN] == 1 || UtilityNodeIndex[UN] == NumNodes;
+ return UtilityNodeIndex[UN.id] == 1 ||
+ UtilityNodeIndex[UN.id] == NumNodes;
});
// Renumber utility nodes so they can be used to index into Signatures
UtilityNodeIndex.clear();
for (auto &N : Nodes)
for (auto &UN : N.UtilityNodes)
- UN = UtilityNodeIndex.insert({UN, UtilityNodeIndex.size()}).first->second;
+ UN.id = UtilityNodeIndex.insert({UN.id, UtilityNodeIndex.size()})
+ .first->second;
// Initialize signatures
SignaturesT Signatures(/*Size=*/UtilityNodeIndex.size());
diff --git a/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp b/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
index 6af6f1bcdc40a8..7dfc4669b40b14 100644
--- a/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
+++ b/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
@@ -13,8 +13,8 @@
#include "gtest/gtest.h"
using testing::Field;
+using testing::Matcher;
using testing::UnorderedElementsAre;
-using testing::UnorderedElementsAreArray;
namespace llvm {
@@ -24,29 +24,35 @@ void PrintTo(const BPFunctionNode &Node, std::ostream *OS) {
}
TEST(BPFunctionNodeTest, Basic) {
+ auto UNIdsAre = [](auto... Ids) {
+ return UnorderedElementsAre(Field("id", &BPFunctionNode::UtilityNodeT::id,
+ std::forward<uint32_t>(Ids))...);
+ };
auto NodeIs = [](BPFunctionNode::IDT Id,
- ArrayRef<BPFunctionNode::UtilityNodeT> UNs) {
- return AllOf(Field("Id", &BPFunctionNode::Id, Id),
- Field("UtilityNodes", &BPFunctionNode::UtilityNodes,
- UnorderedElementsAreArray(UNs)));
+ Matcher<ArrayRef<BPFunctionNode::UtilityNodeT>> UNsMatcher) {
+ return AllOf(
+ Field("Id", &BPFunctionNode::Id, Id),
+ Field("UtilityNodes", &BPFunctionNode::UtilityNodes, UNsMatcher));
};
auto Nodes = TemporalProfTraceTy::createBPFunctionNodes({
TemporalProfTraceTy({0, 1, 2, 3}),
});
- EXPECT_THAT(Nodes,
- UnorderedElementsAre(NodeIs(0, {0, 1, 2}), NodeIs(1, {1, 2}),
- NodeIs(2, {1, 2}), NodeIs(3, {2})));
+ EXPECT_THAT(Nodes, UnorderedElementsAre(NodeIs(0, UNIdsAre(0, 1, 2)),
+ NodeIs(1, UNIdsAre(1, 2)),
+ NodeIs(2, UNIdsAre(1, 2)),
+ NodeIs(3, UNIdsAre(2))));
Nodes = TemporalProfTraceTy::createBPFunctionNodes({
TemporalProfTraceTy({0, 1, 2, 3, 4}),
TemporalProfTraceTy({4, 2}),
});
- EXPECT_THAT(Nodes,
- UnorderedElementsAre(NodeIs(0, {0, 1, 2}), NodeIs(1, {1, 2}),
- NodeIs(2, {1, 2, 4, 5}), NodeIs(3, {2}),
- NodeIs(4, {2, 3, 4, 5})));
+ EXPECT_THAT(Nodes, UnorderedElementsAre(NodeIs(0, UNIdsAre(0, 1, 2)),
+ NodeIs(1, UNIdsAre(1, 2)),
+ NodeIs(2, UNIdsAre(1, 2, 4, 5)),
+ NodeIs(3, UNIdsAre(2)),
+ NodeIs(4, UNIdsAre(2, 3, 4, 5))));
}
} // end namespace llvm
diff --git a/llvm/unittests/Support/BalancedPartitioningTest.cpp b/llvm/unittests/Support/BalancedPartitioningTest.cpp
index aac7e6edc92181..79e89ae2014807 100644
--- a/llvm/unittests/Support/BalancedPartitioningTest.cpp
+++ b/llvm/unittests/Support/BalancedPartitioningTest.cpp
@@ -7,7 +7,6 @@
//===----------------------------------------------------------------------===//
#include "llvm/Support/BalancedPartitioning.h"
-#include "llvm/Support/MathExtras.h"
#include "llvm/Testing/Support/SupportHelpers.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
@@ -39,13 +38,19 @@ class BalancedPartitioningTest : public ::testing::Test {
return Ids;
}
- static std::vector<std::pair<BPFunctionNode::IDT, uint32_t>>
- getBuckets(std::vector<BPFunctionNode> &Nodes) {
- std::vector<std::pair<BPFunctionNode::IDT, uint32_t>> Res;
- for (auto &N : Nodes)
- Res.emplace_back(std::make_pair(N.Id, *N.getBucket()));
- return Res;
- }
+ static testing::Matcher<BPFunctionNode> NodeIdIs(BPFunctionNode::IDT Id) {
+ return Field("Id", &BPFunctionNode::Id, Id);
+ };
+
+ static testing::Matcher<BPFunctionNode>
+ NodeBucketIs(std::optional<uint32_t> Bucket) {
+ return Field("Bucket", &BPFunctionNode::Bucket, Bucket);
+ };
+
+ static testing::Matcher<BPFunctionNode>
+ NodeIs(BPFunctionNode::IDT Id, std::optional<uint32_t> Bucket) {
+ return AllOf(NodeIdIs(Id), NodeBucketIs(Bucket));
+ };
};
TEST_F(BalancedPartitioningTest, Basic) {
@@ -57,11 +62,6 @@ TEST_F(BalancedPartitioningTest, Basic) {
Bp.run(Nodes);
- auto NodeIs = [](BPFunctionNode::IDT Id, std::optional<uint32_t> Bucket) {
- return AllOf(Field("Id", &BPFunctionNode::Id, Id),
- Field("Bucket", &BPFunctionNode::Bucket, Bucket));
- };
-
EXPECT_THAT(Nodes,
UnorderedElementsAre(NodeIs(0, 0), NodeIs(1, 1), NodeIs(2, 2),
NodeIs(3, 3), NodeIs(4, 4)));
@@ -88,8 +88,7 @@ TEST_F(BalancedPartitioningTest, Large) {
Bp.run(Nodes);
- EXPECT_THAT(
- Nodes, Each(Not(Field("Bucket", &BPFunctionNode::Bucket, std::nullopt))));
+ EXPECT_THAT(Nodes, Each(Not(NodeBucketIs(std::nullopt))));
EXPECT_THAT(getIds(Nodes), UnorderedElementsAreArray(OrigIds));
}
@@ -122,12 +121,14 @@ TEST_F(BalancedPartitioningTest, Weight1) {
Bp.run(Nodes);
- // Verify that the created groups are [(0,3) -- (1,4) -- (2,5)].
- auto Res = getBuckets(Nodes);
- llvm::sort(Res);
- EXPECT_THAT(AbsoluteDifference(Res[0].second, Res[3].second), 1);
- EXPECT_THAT(AbsoluteDifference(Res[1].second, Res[4].second), 1);
- EXPECT_THAT(AbsoluteDifference(Res[2].second, Res[5].second), 1);
+ // Check that nodes that share important UNs are ordered together
+ auto NodesRef = ArrayRef(Nodes);
+ auto Groups = {NodesRef.slice(0, 2), NodesRef.slice(2, 2),
+ NodesRef.slice(4, 2)};
+ EXPECT_THAT(Groups, UnorderedElementsAre(
+ UnorderedElementsAre(NodeIdIs(0), NodeIdIs(3)),
+ UnorderedElementsAre(NodeIdIs(1), NodeIdIs(4)),
+ UnorderedElementsAre(NodeIdIs(2), NodeIdIs(5))));
}
TEST_F(BalancedPartitioningTest, Weight2) {
@@ -146,16 +147,13 @@ TEST_F(BalancedPartitioningTest, Weight2) {
Bp.run(Nodes);
- // Verify that the created groups are [(0,2,4) -- (1,3,5)].
- auto Res = getBuckets(Nodes);
- llvm::sort(Res);
- EXPECT_LE(AbsoluteDifference(Res[0].second, Res[2].second), 2u);
- EXPECT_LE(AbsoluteDifference(Res[0].second, Res[4].second), 2u);
- EXPECT_LE(AbsoluteDifference(Res[2].second, Res[4].second), 2u);
-
- EXPECT_LE(AbsoluteDifference(Res[1].second, Res[3].second), 2u);
- EXPECT_LE(AbsoluteDifference(Res[1].second, Res[5].second), 2u);
- EXPECT_LE(AbsoluteDifference(Res[3].second, Res[5].second), 2u);
+ // Check that nodes that share important UNs are ordered together
+ auto NodesRef = ArrayRef(Nodes);
+ auto Groups = {NodesRef.slice(0, 3), NodesRef.slice(3, 3)};
+ EXPECT_THAT(Groups,
+ UnorderedElementsAre(
+ UnorderedElementsAre(NodeIdIs(0), NodeIdIs(2), NodeIdIs(4)),
+ UnorderedElementsAre(NodeIdIs(1), NodeIdIs(3), NodeIdIs(5))));
}
} // end namespace llvm
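
One detail worth highlighting in this patch is the renumbering step: after pruning, the surviving UN.Id values are remapped to a dense [0, N) range so they can index directly into Signatures. A hedged sketch of the same trick using std::unordered_map in place of llvm::DenseMap:

#include <cstdint>
#include <iostream>
#include <unordered_map>
#include <vector>

int main() {
  std::vector<uint32_t> Ids = {42, 7, 42, 100, 7};
  std::unordered_map<uint32_t, uint32_t> Index;
  for (uint32_t &Id : Ids) {
    // insert() is a no-op for an id that was already seen, so every distinct
    // id receives the next dense index exactly once.
    Id = Index.insert({Id, static_cast<uint32_t>(Index.size())}).first->second;
  }
  for (uint32_t Id : Ids)
    std::cout << Id << ' '; // prints: 0 1 0 2 1
  std::cout << '\n';
  return 0;
}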
From 73be288748eca0538448ce58728d42c0675e3a39 Mon Sep 17 00:00:00 2001
From: spupyrev <spupyrev at fb.com>
Date: Thu, 18 Jan 2024 09:57:55 -0800
Subject: [PATCH 4/5] review 2
---
.../llvm/Support/BalancedPartitioning.h | 6 +--
llvm/lib/ProfileData/InstrProf.cpp | 23 +++++---
llvm/lib/Support/BalancedPartitioning.cpp | 54 ++++++++-----------
.../ProfileData/BPFunctionNodeTest.cpp | 2 +-
4 files changed, 41 insertions(+), 44 deletions(-)
diff --git a/llvm/include/llvm/Support/BalancedPartitioning.h b/llvm/include/llvm/Support/BalancedPartitioning.h
index c00f7b1d2b6434..7cd05861600453 100644
--- a/llvm/include/llvm/Support/BalancedPartitioning.h
+++ b/llvm/include/llvm/Support/BalancedPartitioning.h
@@ -61,9 +61,9 @@ class BPFunctionNode {
using IDT = uint64_t;
/// The type of UtilityNode
struct UtilityNodeT {
- UtilityNodeT(uint32_t id, uint32_t weight = 1) : id(id), weight(weight) {}
- uint32_t id;
- uint32_t weight;
+ UtilityNodeT(uint32_t Id, uint32_t Weight = 1) : Id(Id), Weight(Weight) {}
+ uint32_t Id;
+ uint32_t Weight;
};
/// \param UtilityNodes the set of utility nodes (must be unique'd)
diff --git a/llvm/lib/ProfileData/InstrProf.cpp b/llvm/lib/ProfileData/InstrProf.cpp
index f0c2ffb1de2d0e..0c1f64e63685a1 100644
--- a/llvm/lib/ProfileData/InstrProf.cpp
+++ b/llvm/lib/ProfileData/InstrProf.cpp
@@ -905,6 +905,20 @@ void InstrProfRecord::addValueData(uint32_t ValueKind, uint32_t Site,
ValueSites.emplace_back(VData, VData + N);
}
+// Deduplicate utility nodes for a given function.
+// TODO: One may experiment with accumulating the weights of duplicates.
+void sortAndDeduplicate(SmallVector<BPFunctionNode::UtilityNodeT, 4> &UNs) {
+ using UtilityNodeT = BPFunctionNode::UtilityNodeT;
+ llvm::sort(UNs, [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.Id < R.Id;
+ });
+ UNs.erase(std::unique(UNs.begin(), UNs.end(),
+ [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.Id == R.Id;
+ }),
+ UNs.end());
+}
+
std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
ArrayRef<TemporalProfTraceTy> Traces) {
using IDT = BPFunctionNode::IDT;
@@ -940,14 +954,7 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
std::vector<BPFunctionNode> Nodes;
for (auto Id : FunctionIds) {
auto &UNs = FuncGroups[Id];
- llvm::sort(UNs, [](const UtilityNodeT &L, const UtilityNodeT &R) {
- return L.id < R.id;
- });
- UNs.erase(std::unique(UNs.begin(), UNs.end(),
- [](const UtilityNodeT &L, const UtilityNodeT &R) {
- return L.id == R.id;
- }),
- UNs.end());
+ sortAndDeduplicate(UNs);
Nodes.emplace_back(Id, UNs);
}
return Nodes;
diff --git a/llvm/lib/Support/BalancedPartitioning.cpp b/llvm/lib/Support/BalancedPartitioning.cpp
index a9733bb2844a3f..8a02649a6ab369 100644
--- a/llvm/lib/Support/BalancedPartitioning.cpp
+++ b/llvm/lib/Support/BalancedPartitioning.cpp
@@ -20,19 +20,10 @@
using namespace llvm;
#define DEBUG_TYPE "balanced-partitioning"
-namespace llvm {
-template <> struct format_provider<BPFunctionNode::UtilityNodeT> {
- static void format(const BPFunctionNode::UtilityNodeT &V, raw_ostream &OS,
- StringRef Style) {
- OS << "(" << V.id << "-" << V.weight << ")";
- }
-};
-} // namespace llvm
-
void BPFunctionNode::dump(raw_ostream &OS) const {
OS << "{ID=" << Id << " Utilities={";
for (auto &N : UtilityNodes)
- OS << N.id << " ,";
+ OS << N.Id << " ,";
OS << "}";
if (Bucket.has_value())
OS << " Bucket=" << Bucket.value();
@@ -180,42 +171,41 @@ void BalancedPartitioning::runIterations(const FunctionNodeRange Nodes,
unsigned RecDepth, unsigned LeftBucket,
unsigned RightBucket,
std::mt19937 &RNG) const {
- unsigned NumNodes = std::distance(Nodes.begin(), Nodes.end());
+ // Count the degree of each utility node.
DenseMap<uint32_t, unsigned> UtilityNodeIndex;
for (auto &N : Nodes)
for (auto &UN : N.UtilityNodes)
- ++UtilityNodeIndex[UN.id];
+ ++UtilityNodeIndex[UN.Id];
// Remove utility nodes if they have just one edge or are connected to all
- // functions
+ // functions.
+ unsigned NumNodes = std::distance(Nodes.begin(), Nodes.end());
for (auto &N : Nodes)
llvm::erase_if(N.UtilityNodes, [&](auto &UN) {
- return UtilityNodeIndex[UN.id] == 1 ||
- UtilityNodeIndex[UN.id] == NumNodes;
+ return UtilityNodeIndex[UN.Id] == 1 ||
+ UtilityNodeIndex[UN.Id] == NumNodes;
});
- // Renumber utility nodes so they can be used to index into Signatures
+ // Renumber utility nodes so they can be used to index into Signatures.
UtilityNodeIndex.clear();
for (auto &N : Nodes)
for (auto &UN : N.UtilityNodes)
- UN.id = UtilityNodeIndex.insert({UN.id, UtilityNodeIndex.size()})
+ UN.Id = UtilityNodeIndex.insert({UN.Id, UtilityNodeIndex.size()})
.first->second;
- // Initialize signatures
+ // Initialize signatures.
SignaturesT Signatures(/*Size=*/UtilityNodeIndex.size());
for (auto &N : Nodes) {
for (auto &UN : N.UtilityNodes) {
- assert(UN.id < Signatures.size());
+ assert(UN.Id < Signatures.size());
if (N.Bucket == LeftBucket)
- Signatures[UN.id].LeftCount++;
+ Signatures[UN.Id].LeftCount++;
else
- Signatures[UN.id].RightCount++;
- // Identical utility nodes (having the same UN.id) are guaranteed to have
- // the same weight; thus, we can get a new weight only when the signature
- // is uninitialized.
- if (Signatures[UN.id].Weight != UN.weight) {
- assert(Signatures[UN.id].Weight == 1);
- Signatures[UN.id].Weight = UN.weight;
- }
+ Signatures[UN.Id].RightCount++;
+ // Identical utility nodes (having the same UN.Id) have the same weight
+ // (unless there are hash collisions mapping utilities to the same Id);
+ // thus, we can get a new weight only when the signature is uninitialized.
+ if (Signatures[UN.Id].Weight != UN.Weight)
+ Signatures[UN.Id].Weight = UN.Weight;
}
}
@@ -306,14 +296,14 @@ bool BalancedPartitioning::moveFunctionNode(BPFunctionNode &N,
// Update signatures and invalidate gain cache
if (FromLeftToRight) {
for (auto &UN : N.UtilityNodes) {
- auto &Signature = Signatures[UN.id];
+ auto &Signature = Signatures[UN.Id];
Signature.LeftCount--;
Signature.RightCount++;
Signature.CachedGainIsValid = false;
}
} else {
for (auto &UN : N.UtilityNodes) {
- auto &Signature = Signatures[UN.id];
+ auto &Signature = Signatures[UN.Id];
Signature.LeftCount++;
Signature.RightCount--;
Signature.CachedGainIsValid = false;
@@ -342,8 +332,8 @@ float BalancedPartitioning::moveGain(const BPFunctionNode &N,
const SignaturesT &Signatures) {
float Gain = 0.f;
for (auto &UN : N.UtilityNodes)
- Gain += (FromLeftToRight ? Signatures[UN.id].CachedGainLR
- : Signatures[UN.id].CachedGainRL);
+ Gain += (FromLeftToRight ? Signatures[UN.Id].CachedGainLR
+ : Signatures[UN.Id].CachedGainRL);
return Gain;
}
diff --git a/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp b/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
index 7dfc4669b40b14..787e3041fb5e7b 100644
--- a/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
+++ b/llvm/unittests/ProfileData/BPFunctionNodeTest.cpp
@@ -25,7 +25,7 @@ void PrintTo(const BPFunctionNode &Node, std::ostream *OS) {
TEST(BPFunctionNodeTest, Basic) {
auto UNIdsAre = [](auto... Ids) {
- return UnorderedElementsAre(Field("id", &BPFunctionNode::UtilityNodeT::id,
+ return UnorderedElementsAre(Field("Id", &BPFunctionNode::UtilityNodeT::Id,
std::forward<uint32_t>(Ids))...);
};
auto NodeIs = [](BPFunctionNode::IDT Id,
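
The sortAndDeduplicate helper introduced here is a plain sort-then-unique keyed on Id. A standalone sketch with a hypothetical UtilityNode struct (duplicates are dropped rather than having their weights accumulated, matching the TODO above):

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

struct UtilityNode {
  uint32_t Id;
  uint32_t Weight;
};

// Sort by Id and drop duplicate ids; duplicates are assumed to carry the
// same weight, so it does not matter which copy survives.
static void sortAndDeduplicate(std::vector<UtilityNode> &UNs) {
  std::sort(UNs.begin(), UNs.end(),
            [](const UtilityNode &L, const UtilityNode &R) { return L.Id < R.Id; });
  UNs.erase(std::unique(UNs.begin(), UNs.end(),
                        [](const UtilityNode &L, const UtilityNode &R) {
                          return L.Id == R.Id;
                        }),
            UNs.end());
}

int main() {
  std::vector<UtilityNode> UNs = {{3, 1}, {1, 100}, {3, 1}, {1, 100}};
  sortAndDeduplicate(UNs);
  for (const auto &UN : UNs)
    std::cout << "(" << UN.Id << "," << UN.Weight << ") "; // (1,100) (3,1)
  std::cout << '\n';
  return 0;
}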
From 1e4879550686eb0bb19c28e416ed611b98b71049 Mon Sep 17 00:00:00 2001
From: spupyrev <spupyrev at fb.com>
Date: Thu, 18 Jan 2024 11:55:08 -0800
Subject: [PATCH 5/5] more reviews
---
llvm/include/llvm/Support/BalancedPartitioning.h | 13 +++++++++++++
llvm/lib/ProfileData/InstrProf.cpp | 16 +---------------
llvm/lib/Support/BalancedPartitioning.cpp | 5 ++---
3 files changed, 16 insertions(+), 18 deletions(-)
diff --git a/llvm/include/llvm/Support/BalancedPartitioning.h b/llvm/include/llvm/Support/BalancedPartitioning.h
index 7cd05861600453..19eb6d41e45e3a 100644
--- a/llvm/include/llvm/Support/BalancedPartitioning.h
+++ b/llvm/include/llvm/Support/BalancedPartitioning.h
@@ -64,6 +64,19 @@ class BPFunctionNode {
UtilityNodeT(uint32_t Id, uint32_t Weight = 1) : Id(Id), Weight(Weight) {}
uint32_t Id;
uint32_t Weight;
+
+ // Deduplicate utility nodes for a given function.
+ // TODO: One may experiment with accumulating the weights of duplicates.
+ static void sortAndDeduplicate(SmallVector<UtilityNodeT, 4> &UNs) {
+ llvm::sort(UNs, [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.Id < R.Id;
+ });
+ UNs.erase(std::unique(UNs.begin(), UNs.end(),
+ [](const UtilityNodeT &L, const UtilityNodeT &R) {
+ return L.Id == R.Id;
+ }),
+ UNs.end());
+ }
};
/// \param UtilityNodes the set of utility nodes (must be unique'd)
diff --git a/llvm/lib/ProfileData/InstrProf.cpp b/llvm/lib/ProfileData/InstrProf.cpp
index 0c1f64e63685a1..ba4fb227381f6c 100644
--- a/llvm/lib/ProfileData/InstrProf.cpp
+++ b/llvm/lib/ProfileData/InstrProf.cpp
@@ -905,20 +905,6 @@ void InstrProfRecord::addValueData(uint32_t ValueKind, uint32_t Site,
ValueSites.emplace_back(VData, VData + N);
}
-// Deduplicate utility nodes for a given function.
-// TODO: One may experiment with accumulating the weights of duplicates.
-void sortAndDeduplicate(SmallVector<BPFunctionNode::UtilityNodeT, 4> &UNs) {
- using UtilityNodeT = BPFunctionNode::UtilityNodeT;
- llvm::sort(UNs, [](const UtilityNodeT &L, const UtilityNodeT &R) {
- return L.Id < R.Id;
- });
- UNs.erase(std::unique(UNs.begin(), UNs.end(),
- [](const UtilityNodeT &L, const UtilityNodeT &R) {
- return L.Id == R.Id;
- }),
- UNs.end());
-}
-
std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
ArrayRef<TemporalProfTraceTy> Traces) {
using IDT = BPFunctionNode::IDT;
@@ -954,7 +940,7 @@ std::vector<BPFunctionNode> TemporalProfTraceTy::createBPFunctionNodes(
std::vector<BPFunctionNode> Nodes;
for (auto Id : FunctionIds) {
auto &UNs = FuncGroups[Id];
- sortAndDeduplicate(UNs);
+ UtilityNodeT::sortAndDeduplicate(UNs);
Nodes.emplace_back(Id, UNs);
}
return Nodes;
diff --git a/llvm/lib/Support/BalancedPartitioning.cpp b/llvm/lib/Support/BalancedPartitioning.cpp
index 8a02649a6ab369..cd71ddec476475 100644
--- a/llvm/lib/Support/BalancedPartitioning.cpp
+++ b/llvm/lib/Support/BalancedPartitioning.cpp
@@ -203,9 +203,8 @@ void BalancedPartitioning::runIterations(const FunctionNodeRange Nodes,
Signatures[UN.Id].RightCount++;
// Identical utility nodes (having the same UN.Id) have the same weight
// (unless there are hash collisions mapping utilities to the same Id);
- // thus, we can get a new weight only when the signature is uninitialized.
- if (Signatures[UN.Id].Weight != UN.Weight)
- Signatures[UN.Id].Weight = UN.Weight;
+ // thus, we get a new weight only when the signature is uninitialized.
+ Signatures[UN.Id].Weight = UN.Weight;
}
}
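
Putting the pieces together, a hedged end-to-end usage sketch mirroring the Weight1 unit test (it assumes the BalancedPartitioningConfig/run API used by the existing tests; the heavily weighted utilities 0-2 should pull the function pairs sharing them into adjacent buckets):

#include "llvm/Support/BalancedPartitioning.h"
#include "llvm/Support/raw_ostream.h"
#include <vector>

using namespace llvm;

int main() {
  using UN = BPFunctionNode::UtilityNodeT;
  // Utility nodes 0-2 are heavy (Weight=100); 3-4 are light (Weight=1).
  std::vector<UN> UNs = {UN(0, 100), UN(1, 100), UN(2, 100), UN(3, 1), UN(4, 1)};
  std::vector<BPFunctionNode> Nodes = {
      BPFunctionNode(0, {UNs[0], UNs[3]}), BPFunctionNode(1, {UNs[1], UNs[3]}),
      BPFunctionNode(2, {UNs[2], UNs[3]}), BPFunctionNode(3, {UNs[0], UNs[4]}),
      BPFunctionNode(4, {UNs[1], UNs[4]}), BPFunctionNode(5, {UNs[2], UNs[4]}),
  };
  BalancedPartitioningConfig Config;
  BalancedPartitioning Bp(Config);
  Bp.run(Nodes); // assigns a Bucket to every node
  for (auto &N : Nodes) {
    N.dump(errs()); // e.g. {ID=0 Utilities={0 ,3 ,} Bucket=...}
    errs() << "\n";
  }
  return 0;
}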