[llvm] [AMDGPU][SIPreEmitPeephole] mustRetainExeczBranch: use BranchProbability and TargetSchedmodel (PR #109818)

Matt Arsenault via llvm-commits llvm-commits at lists.llvm.org
Wed Sep 25 04:15:56 PDT 2024


Juan Manuel Martinez Caamaño <juamarti at amd.com>
Message-ID:
In-Reply-To: <llvm.org/llvm/llvm-project/pull/109818 at github.com>


================
@@ -304,11 +307,95 @@ bool SIPreEmitPeephole::getBlockDestinations(
   return true;
 }
 
-bool SIPreEmitPeephole::mustRetainExeczBranch(
-    const MachineBasicBlock &From, const MachineBasicBlock &To) const {
+namespace {
+class CostModelBase {
+public:
+  virtual bool isProfitable(const MachineInstr &MI) = 0;
+  virtual ~CostModelBase() = default;
+  static std::unique_ptr<CostModelBase> Create(const MachineBasicBlock &MBB,
+                                               const MachineBasicBlock &,
+                                               const SIInstrInfo &TII);
+};
+
+class TrivialCostModel : public CostModelBase {
+  friend CostModelBase;
+
   unsigned NumInstr = 0;
-  const MachineFunction *MF = From.getParent();
+  const SIInstrInfo &TII;
+
+  TrivialCostModel(const SIInstrInfo &TII) : TII(TII) {}
+
+public:
+  bool isProfitable(const MachineInstr &MI) override {
+    ++NumInstr;
+    if (NumInstr >= SkipThreshold)
+      return false;
+    // These instructions are potentially expensive even if EXEC = 0.
+    if (TII.isSMRD(MI) || TII.isVMEM(MI) || TII.isFLAT(MI) || TII.isDS(MI) ||
+        TII.isWaitcnt(MI.getOpcode()))
+      return false;
+    return true;
+  }
+  ~TrivialCostModel() override = default;
+};
+
+class BranchWeightCostModel : public CostModelBase {
+  friend CostModelBase;
+
+  unsigned long ExecNZBranchCost;
+  unsigned long UnconditionalBranchCost;
+  unsigned long N;
+  unsigned long D;
----------------
arsenm wrote:

Give these full names; I don't know what these mean.

Should probably use uint64_t instead of unsigned long? 
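A minimal sketch of what the suggested change could look like, assuming N and D are the numerator and denominator of the probability that the "then" block actually executes (EXEC != 0). The field and parameter names below are hypothetical, not taken from the patch, and the class is shown standalone rather than deriving from CostModelBase:

    #include "llvm/Support/BranchProbability.h"
    #include <cstdint>

    // Illustrative sketch only: names are made up for clarity, not from the PR.
    class BranchWeightCostModel {
      uint64_t ExecNZBranchCost;        // cost of the s_cbranch_execz under discussion
      uint64_t UnconditionalBranchCost; // cost of an unconditional branch
      // Probability that the "then" block really executes, kept as an explicit
      // numerator/denominator pair instead of the opaque N and D.
      uint64_t NumeratorThenBlockExecuted;
      uint64_t DenominatorThenBlockExecuted;

    public:
      BranchWeightCostModel(llvm::BranchProbability ThenBlockProb,
                            uint64_t ExecNZBranchCost,
                            uint64_t UnconditionalBranchCost)
          : ExecNZBranchCost(ExecNZBranchCost),
            UnconditionalBranchCost(UnconditionalBranchCost),
            NumeratorThenBlockExecuted(ThenBlockProb.getNumerator()),
            DenominatorThenBlockExecuted(llvm::BranchProbability::getDenominator()) {}
    };

Using uint64_t rather than unsigned long also avoids the platform-dependent width of unsigned long (32 bits on Windows/MSVC), so the cost arithmetic behaves the same on every host.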

https://github.com/llvm/llvm-project/pull/109818


More information about the llvm-commits mailing list