[llvm] ab2e766 - [mlgo][inl] Interactive mode: optionally tell the default decision

Mircea Trofin via llvm-commits llvm-commits at lists.llvm.org
Mon Apr 10 12:20:17 PDT 2023


Author: Mircea Trofin
Date: 2023-04-10T12:20:09-07:00
New Revision: ab2e7666c20d00a43b958e91c24991c973c81393

URL: https://github.com/llvm/llvm-project/commit/ab2e7666c20d00a43b958e91c24991c973c81393
DIFF: https://github.com/llvm/llvm-project/commit/ab2e7666c20d00a43b958e91c24991c973c81393.diff

LOG: [mlgo][inl] Interactive mode: optionally tell the default decision

This helps training algorithms that may sometimes want to replicate the
default decision. The default decision is presented as an extra feature
called `inlining_default`. It is not exported by default, to save
computation time.

This is only available in interactive mode.
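
As a rough usage sketch (mirroring the updated test below; the channel base
name and input module are placeholders), the extra feature is requested by
adding the new flag to an existing interactive-mode invocation:

  opt -passes=scc-oz-module-inliner -enable-ml-inliner=release \
      -inliner-interactive-channel-base=/tmp/inliner-channel \
      -inliner-interactive-include-default \
      input.ll -S -o /dev/null

With the flag set, the advisor appends `inlining_default` to the end of the
feature list it sends over the channel; without it, the feature list is
unchanged.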

Differential Revision: https://reviews.llvm.org/D147794

Added: 
    

Modified: 
    llvm/include/llvm/Analysis/InlineAdvisor.h
    llvm/include/llvm/Analysis/InlineModelFeatureMaps.h
    llvm/include/llvm/Analysis/MLInlineAdvisor.h
    llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
    llvm/lib/Analysis/InlineAdvisor.cpp
    llvm/lib/Analysis/MLInlineAdvisor.cpp
    llvm/test/Transforms/Inline/ML/interactive-mode.ll

Removed: 
    


################################################################################
diff --git a/llvm/include/llvm/Analysis/InlineAdvisor.h b/llvm/include/llvm/Analysis/InlineAdvisor.h
index c67698777775b..53c018d15cd71 100644
--- a/llvm/include/llvm/Analysis/InlineAdvisor.h
+++ b/llvm/include/llvm/Analysis/InlineAdvisor.h
@@ -357,7 +357,8 @@ class InlineAdvisorAnalysisPrinterPass
 };
 
 std::unique_ptr<InlineAdvisor>
-getReleaseModeAdvisor(Module &M, ModuleAnalysisManager &MAM);
+getReleaseModeAdvisor(Module &M, ModuleAnalysisManager &MAM,
+                      std::function<bool(CallBase &)> GetDefaultAdvice);
 
 std::unique_ptr<InlineAdvisor>
 getDevelopmentModeAdvisor(Module &M, ModuleAnalysisManager &MAM,

diff --git a/llvm/include/llvm/Analysis/InlineModelFeatureMaps.h b/llvm/include/llvm/Analysis/InlineModelFeatureMaps.h
index 0418a2b36020c..a64e4c3cb7274 100644
--- a/llvm/include/llvm/Analysis/InlineModelFeatureMaps.h
+++ b/llvm/include/llvm/Analysis/InlineModelFeatureMaps.h
@@ -134,6 +134,7 @@ extern const std::vector<TensorSpec> FeatureMap;
 extern const char *const DecisionName;
 extern const TensorSpec InlineDecisionSpec;
 extern const char *const DefaultDecisionName;
+extern const TensorSpec DefaultDecisionSpec;
 extern const char *const RewardName;
 
 using InlineFeatures = std::vector<int64_t>;

diff --git a/llvm/include/llvm/Analysis/MLInlineAdvisor.h b/llvm/include/llvm/Analysis/MLInlineAdvisor.h
index 23ada7fe6a25b..f58862e533529 100644
--- a/llvm/include/llvm/Analysis/MLInlineAdvisor.h
+++ b/llvm/include/llvm/Analysis/MLInlineAdvisor.h
@@ -28,7 +28,8 @@ class MLInlineAdvice;
 class MLInlineAdvisor : public InlineAdvisor {
 public:
   MLInlineAdvisor(Module &M, ModuleAnalysisManager &MAM,
-                  std::unique_ptr<MLModelRunner> ModelRunner);
+                  std::unique_ptr<MLModelRunner> ModelRunner,
+                  std::function<bool(CallBase &)> GetDefaultAdvice);
 
   virtual ~MLInlineAdvisor() = default;
 
@@ -63,6 +64,7 @@ class MLInlineAdvisor : public InlineAdvisor {
   unsigned getInitialFunctionLevel(const Function &F) const;
 
   std::unique_ptr<MLModelRunner> ModelRunner;
+  std::function<bool(CallBase &)> GetDefaultAdvice;
 
 private:
   int64_t getModuleIRSize() const;

diff --git a/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp b/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
index 605e5902460c5..456d58660680d 100644
--- a/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
+++ b/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
@@ -165,7 +165,6 @@ class DevelopmentModeMLInlineAdvisor : public MLInlineAdvisor {
   bool isLogging() const { return !!Logger; }
   std::unique_ptr<MLInlineAdvice> getMandatoryAdviceImpl(CallBase &CB) override;
 
-  std::function<bool(CallBase &)> GetDefaultAdvice;
   const bool IsDoingInference;
   std::unique_ptr<TrainingLogger> Logger;
 
@@ -280,7 +279,7 @@ TrainingLogger::TrainingLogger(StringRef LogFileName,
     append_range(FT, MUTR->extraOutputsForLoggingSpecs());
 
   DefaultDecisionPos = FT.size();
-  FT.push_back(TensorSpec::createSpec<int64_t>(DefaultDecisionName, {1}));
+  FT.push_back(DefaultDecisionSpec);
 
   DecisionPos = FT.size();
   FT.push_back(InlineDecisionSpec);
@@ -331,8 +330,7 @@ DevelopmentModeMLInlineAdvisor::DevelopmentModeMLInlineAdvisor(
     std::unique_ptr<MLModelRunner> ModelRunner,
     std::function<bool(CallBase &)> GetDefaultAdvice,
     std::unique_ptr<TrainingLogger> Logger)
-    : MLInlineAdvisor(M, MAM, std::move(ModelRunner)),
-      GetDefaultAdvice(GetDefaultAdvice),
+    : MLInlineAdvisor(M, MAM, std::move(ModelRunner), GetDefaultAdvice),
       IsDoingInference(isa<ModelUnderTrainingRunner>(getModelRunner())),
       Logger(std::move(Logger)),
       InitialNativeSize(isLogging() ? getTotalSizeEstimate() : 0),

diff --git a/llvm/lib/Analysis/InlineAdvisor.cpp b/llvm/lib/Analysis/InlineAdvisor.cpp
index 16de1cf5215b6..88c262bb882be 100644
--- a/llvm/lib/Analysis/InlineAdvisor.cpp
+++ b/llvm/lib/Analysis/InlineAdvisor.cpp
@@ -208,6 +208,10 @@ bool InlineAdvisorAnalysis::Result::tryCreate(
     Advisor.reset(DA.Factory(M, FAM, Params, IC));
     return !!Advisor;
   }
+  auto GetDefaultAdvice = [&FAM, Params](CallBase &CB) {
+    auto OIC = getDefaultInlineAdvice(CB, FAM, Params);
+    return OIC.has_value();
+  };
   switch (Mode) {
   case InliningAdvisorMode::Default:
     LLVM_DEBUG(dbgs() << "Using default inliner heuristic.\n");
@@ -223,16 +227,12 @@ bool InlineAdvisorAnalysis::Result::tryCreate(
   case InliningAdvisorMode::Development:
 #ifdef LLVM_HAVE_TFLITE
     LLVM_DEBUG(dbgs() << "Using development-mode inliner policy.\n");
-    Advisor =
-        llvm::getDevelopmentModeAdvisor(M, MAM, [&FAM, Params](CallBase &CB) {
-          auto OIC = getDefaultInlineAdvice(CB, FAM, Params);
-          return OIC.has_value();
-        });
+    Advisor = llvm::getDevelopmentModeAdvisor(M, MAM, GetDefaultAdvice);
 #endif
     break;
   case InliningAdvisorMode::Release:
     LLVM_DEBUG(dbgs() << "Using release-mode inliner policy.\n");
-    Advisor = llvm::getReleaseModeAdvisor(M, MAM);
+    Advisor = llvm::getReleaseModeAdvisor(M, MAM, GetDefaultAdvice);
     break;
   }
 

diff --git a/llvm/lib/Analysis/MLInlineAdvisor.cpp b/llvm/lib/Analysis/MLInlineAdvisor.cpp
index 0a23f2efd2c61..dcf341c112e5b 100644
--- a/llvm/lib/Analysis/MLInlineAdvisor.cpp
+++ b/llvm/lib/Analysis/MLInlineAdvisor.cpp
@@ -38,6 +38,12 @@ static cl::opt<std::string> InteractiveChannelBaseName(
         "Base file path for the interactive mode. The incoming filename should "
         "have the name <inliner-interactive-channel-base>.in, while the "
         "outgoing name should be <inliner-interactive-channel-base>.out"));
+static cl::opt<bool> InteractiveIncludeDefault(
+    "inliner-interactive-include-default", cl::Hidden,
+    cl::desc(
+        (Twine("In interactive mode, also send the default policy decision: ") +
+         DefaultDecisionName + ".")
+            .str()));
 
 #if defined(LLVM_HAVE_TF_AOT_INLINERSIZEMODEL)
 // codegen-ed file
@@ -48,7 +54,8 @@ using CompiledModelType = NoopSavedModelImpl;
 #endif
 
 std::unique_ptr<InlineAdvisor>
-llvm::getReleaseModeAdvisor(Module &M, ModuleAnalysisManager &MAM) {
+llvm::getReleaseModeAdvisor(Module &M, ModuleAnalysisManager &MAM,
+                            std::function<bool(CallBase &)> GetDefaultAdvice) {
   if (!llvm::isEmbeddedModelEvaluatorValid<CompiledModelType>() &&
       InteractiveChannelBaseName.empty())
     return nullptr;
@@ -56,12 +63,17 @@ llvm::getReleaseModeAdvisor(Module &M, ModuleAnalysisManager &MAM) {
   if (InteractiveChannelBaseName.empty())
     AOTRunner = std::make_unique<ReleaseModeModelRunner<CompiledModelType>>(
         M.getContext(), FeatureMap, DecisionName);
-  else
+  else {
+    auto Features = FeatureMap;
+    if (InteractiveIncludeDefault)
+      Features.push_back(DefaultDecisionSpec);
     AOTRunner = std::make_unique<InteractiveModelRunner>(
-        M.getContext(), FeatureMap, InlineDecisionSpec,
+        M.getContext(), Features, InlineDecisionSpec,
         InteractiveChannelBaseName + ".out",
         InteractiveChannelBaseName + ".in");
-  return std::make_unique<MLInlineAdvisor>(M, MAM, std::move(AOTRunner));
+  }
+  return std::make_unique<MLInlineAdvisor>(M, MAM, std::move(AOTRunner),
+                                           GetDefaultAdvice);
 }
 
 #define DEBUG_TYPE "inline-ml"
@@ -96,6 +108,8 @@ const char *const llvm::DecisionName = "inlining_decision";
 const TensorSpec llvm::InlineDecisionSpec =
     TensorSpec::createSpec<int64_t>(DecisionName, {1});
 const char *const llvm::DefaultDecisionName = "inlining_default";
+const TensorSpec llvm::DefaultDecisionSpec =
+    TensorSpec::createSpec<int64_t>(DefaultDecisionName, {1});
 const char *const llvm::RewardName = "delta_size";
 
 CallBase *getInlinableCS(Instruction &I) {
@@ -108,11 +122,13 @@ CallBase *getInlinableCS(Instruction &I) {
   return nullptr;
 }
 
-MLInlineAdvisor::MLInlineAdvisor(Module &M, ModuleAnalysisManager &MAM,
-                                 std::unique_ptr<MLModelRunner> Runner)
+MLInlineAdvisor::MLInlineAdvisor(
+    Module &M, ModuleAnalysisManager &MAM,
+    std::unique_ptr<MLModelRunner> Runner,
+    std::function<bool(CallBase &)> GetDefaultAdvice)
     : InlineAdvisor(
           M, MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager()),
-      ModelRunner(std::move(Runner)),
+      ModelRunner(std::move(Runner)), GetDefaultAdvice(GetDefaultAdvice),
       CG(MAM.getResult<LazyCallGraphAnalysis>(M)),
       InitialIRSize(getModuleIRSize()), CurrentIRSize(InitialIRSize) {
   assert(ModelRunner);
@@ -393,7 +409,10 @@ std::unique_ptr<InlineAdvice> MLInlineAdvisor::getAdviceImpl(CallBase &CB) {
     *ModelRunner->getTensor<int64_t>(inlineCostFeatureToMlFeature(
         static_cast<InlineCostFeatureIndex>(I))) = CostFeatures->at(I);
   }
-
+  // This one would have been set up to be right at the end.
+  if (!InteractiveChannelBaseName.empty() && InteractiveIncludeDefault)
+    *ModelRunner->getTensor<int64_t>(InlineCostFeatureIndex::NumberOfFeatures) =
+        GetDefaultAdvice(CB);
   return getAdviceFromModel(CB, ORE);
 }
 

diff --git a/llvm/test/Transforms/Inline/ML/interactive-mode.ll b/llvm/test/Transforms/Inline/ML/interactive-mode.ll
index 07f98352d5c3c..00a64c6c34d07 100644
--- a/llvm/test/Transforms/Inline/ML/interactive-mode.ll
+++ b/llvm/test/Transforms/Inline/ML/interactive-mode.ll
@@ -7,7 +7,12 @@
 ; RUN: cp %S/Inputs/interactive_main.py %t.rundir
 ; RUN: %python %t.rundir/interactive_main.py %t.channel-basename \
 ; RUN:    opt -passes=scc-oz-module-inliner -interactive-model-runner-echo-reply \
-; RUN:    -enable-ml-inliner=release --inliner-interactive-channel-base=%t.channel-basename %S/Inputs/test-module.ll -S -o /dev/null | FileCheck %s
+; RUN:    -enable-ml-inliner=release -inliner-interactive-channel-base=%t.channel-basename %S/Inputs/test-module.ll -S -o /dev/null | FileCheck %s
+; RUN: %python %t.rundir/interactive_main.py %t.channel-basename \
+; RUN:    opt -passes=scc-oz-module-inliner -interactive-model-runner-echo-reply \
+; RUN:    -inliner-interactive-include-default -enable-ml-inliner=release \
+; RUN:    -inliner-interactive-channel-base=%t.channel-basename %S/Inputs/test-module.ll -S -o /dev/null | FileCheck %s -check-prefixes=CHECK,CHECK-DEFAULT
+
 
 ;; It'd be nice if we had stdout and stderr interleaved, but we don't, so
 ;; let's just check the features have non-zero values, and that we see as many
@@ -17,6 +22,7 @@
 ; CHECK-NEXT: sroa_savings: 0
 ; CHECK:      unsimplified_common_instructions: 5
 ; CHECK:      callee_users: 3
+; CHECK-DEFAULT: inlining_default: 0
 ; CHECK:      observation: 5
 ; CHECK-NOT:  observation: 6
 

