[llvm] r347985 - [llvm-mca] Simplify code in class Scheduler. NFCI
Andrea Di Biagio via llvm-commits
llvm-commits at lists.llvm.org
Fri Nov 30 04:49:30 PST 2018
Author: adibiagio
Date: Fri Nov 30 04:49:30 2018
New Revision: 347985
URL: http://llvm.org/viewvc/llvm-project?rev=347985&view=rev
Log:
[llvm-mca] Simplify code in class Scheduler. NFCI
Modified:
llvm/trunk/tools/llvm-mca/include/HardwareUnits/LSUnit.h
llvm/trunk/tools/llvm-mca/include/HardwareUnits/Scheduler.h
llvm/trunk/tools/llvm-mca/lib/Context.cpp
llvm/trunk/tools/llvm-mca/lib/HardwareUnits/Scheduler.cpp
Modified: llvm/trunk/tools/llvm-mca/include/HardwareUnits/LSUnit.h
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/tools/llvm-mca/include/HardwareUnits/LSUnit.h?rev=347985&r1=347984&r2=347985&view=diff
==============================================================================
--- llvm/trunk/tools/llvm-mca/include/HardwareUnits/LSUnit.h (original)
+++ llvm/trunk/tools/llvm-mca/include/HardwareUnits/LSUnit.h Fri Nov 30 04:49:30 2018
@@ -24,6 +24,7 @@ namespace llvm {
namespace mca {
class InstRef;
+class Scheduler;
/// A Load/Store Unit implementing load and store queues.
///
@@ -110,7 +111,7 @@ class LSUnit : public HardwareUnit {
//
// This class doesn't know about the latency of a load instruction. So, it
// conservatively/pessimistically assumes that the latency of a load opcode
- // matches the instruction latency.
+ // matches the instruction latency.
//
// FIXME: In the absence of cache misses (i.e. L1I/L1D/iTLB/dTLB hits/misses),
// and load/store conflicts, the latency of a load is determined by the depth
@@ -195,7 +196,7 @@ public:
// becomes available to the users. At that point, the load no longer needs to
// be tracked by the load queue.
// FIXME: For simplicity, we optimistically assume a similar behavior for
- // store instructions. In practice, store operation don't tend to leave the
+ // store instructions. In practice, store operations don't tend to leave the
// store queue until they reach the 'Retired' stage (See PR39830).
void onInstructionExecuted(const InstRef &IR);
};
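For readers unfamiliar with the queue bookkeeping described in the comment
above, here is a minimal, self-contained sketch of the idea. It is not the
actual LSUnit code; the class name SimpleLSU and the use of plain unsigned
instruction indices are illustrative assumptions:

    #include <set>

    // Toy model: a load/store unit that tracks in-flight memory operations
    // by instruction index and releases the entry once the instruction has
    // finished executing.
    class SimpleLSU {
      std::set<unsigned> LoadQueue;
      std::set<unsigned> StoreQueue;

    public:
      void dispatchLoad(unsigned Index) { LoadQueue.insert(Index); }
      void dispatchStore(unsigned Index) { StoreQueue.insert(Index); }

      // Mirrors the behavior documented above: once an instruction has
      // executed, it no longer needs to be tracked by either queue.
      void onInstructionExecuted(unsigned Index) {
        LoadQueue.erase(Index);
        StoreQueue.erase(Index);
      }
    };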
Modified: llvm/trunk/tools/llvm-mca/include/HardwareUnits/Scheduler.h
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/tools/llvm-mca/include/HardwareUnits/Scheduler.h?rev=347985&r1=347984&r2=347985&view=diff
==============================================================================
--- llvm/trunk/tools/llvm-mca/include/HardwareUnits/Scheduler.h (original)
+++ llvm/trunk/tools/llvm-mca/include/HardwareUnits/Scheduler.h Fri Nov 30 04:49:30 2018
@@ -85,7 +85,7 @@ public:
/// transition (i.e. from state IS_READY, to state IS_EXECUTING). An Instruction
/// leaves the IssuedSet when it reaches the write-back stage.
class Scheduler : public HardwareUnit {
- LSUnit *LSU;
+ LSUnit &LSU;
// Instruction selection strategy for this Scheduler.
std::unique_ptr<SchedulerStrategy> Strategy;
@@ -117,16 +117,15 @@ class Scheduler : public HardwareUnit {
void promoteToReadySet(SmallVectorImpl<InstRef> &Ready);
public:
- Scheduler(const MCSchedModel &Model, LSUnit *Lsu)
- : LSU(Lsu), Resources(make_unique<ResourceManager>(Model)) {
- initializeStrategy(nullptr);
- }
- Scheduler(const MCSchedModel &Model, LSUnit *Lsu,
+ Scheduler(const MCSchedModel &Model, LSUnit &Lsu)
+ : Scheduler(Model, Lsu, nullptr) {}
+
+ Scheduler(const MCSchedModel &Model, LSUnit &Lsu,
std::unique_ptr<SchedulerStrategy> SelectStrategy)
- : LSU(Lsu), Resources(make_unique<ResourceManager>(Model)) {
- initializeStrategy(std::move(SelectStrategy));
- }
- Scheduler(std::unique_ptr<ResourceManager> RM, LSUnit *Lsu,
+ : Scheduler(make_unique<ResourceManager>(Model), Lsu,
+ std::move(SelectStrategy)) {}
+
+ Scheduler(std::unique_ptr<ResourceManager> RM, LSUnit &Lsu,
std::unique_ptr<SchedulerStrategy> SelectStrategy)
: LSU(Lsu), Resources(std::move(RM)) {
initializeStrategy(std::move(SelectStrategy));
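The hunk above replaces two near-identical constructor bodies with delegating
constructors: the convenience overloads now forward to the most general one,
so the LSU reference, the ResourceManager, and the strategy are initialized in
a single place. A minimal sketch of that pattern, using hypothetical names
(Widget, Unit, Resource) rather than the mca classes:

    #include <memory>

    struct Resource {};
    struct Unit {};

    // The two convenience constructors delegate to the most general one, so
    // the reference member and the owned resource are set up in one place.
    class Widget {
      Unit &U;
      std::unique_ptr<Resource> Res;

    public:
      explicit Widget(Unit &U) : Widget(U, std::make_unique<Resource>()) {}

      Widget(Unit &U, std::unique_ptr<Resource> Res)
          : U(U), Res(std::move(Res)) {}
    };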
Modified: llvm/trunk/tools/llvm-mca/lib/Context.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/tools/llvm-mca/lib/Context.cpp?rev=347985&r1=347984&r2=347985&view=diff
==============================================================================
--- llvm/trunk/tools/llvm-mca/lib/Context.cpp (original)
+++ llvm/trunk/tools/llvm-mca/lib/Context.cpp Fri Nov 30 04:49:30 2018
@@ -37,7 +37,7 @@ Context::createDefaultPipeline(const Pip
auto PRF = llvm::make_unique<RegisterFile>(SM, MRI, Opts.RegisterFileSize);
auto LSU = llvm::make_unique<LSUnit>(SM, Opts.LoadQueueSize,
Opts.StoreQueueSize, Opts.AssumeNoAlias);
- auto HWS = llvm::make_unique<Scheduler>(SM, LSU.get());
+ auto HWS = llvm::make_unique<Scheduler>(SM, *LSU);
// Create the pipeline stages.
auto Fetch = llvm::make_unique<EntryStage>(SrcMgr);
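At this call site the unique_ptr is now dereferenced so that the Scheduler
receives a reference: ownership of the LSUnit stays with the Context, while
the Scheduler becomes a non-owning user that can never observe a null LSU. A
minimal sketch of that ownership split, with hypothetical names
(LoadStoreUnit, Consumer) standing in for the mca classes:

    #include <memory>

    struct LoadStoreUnit {};

    // A non-owning user: it only needs to call into the unit, so it holds a
    // reference rather than a pointer.
    class Consumer {
      LoadStoreUnit &LSU;

    public:
      explicit Consumer(LoadStoreUnit &LSU) : LSU(LSU) {}
    };

    int main() {
      // The owner keeps the unique_ptr; the consumer receives a reference.
      auto LSU = std::make_unique<LoadStoreUnit>();
      Consumer C(*LSU);
      (void)C;
    }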
Modified: llvm/trunk/tools/llvm-mca/lib/HardwareUnits/Scheduler.cpp
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/tools/llvm-mca/lib/HardwareUnits/Scheduler.cpp?rev=347985&r1=347984&r2=347985&view=diff
==============================================================================
--- llvm/trunk/tools/llvm-mca/lib/HardwareUnits/Scheduler.cpp (original)
+++ llvm/trunk/tools/llvm-mca/lib/HardwareUnits/Scheduler.cpp Fri Nov 30 04:49:30 2018
@@ -51,7 +51,7 @@ Scheduler::Status Scheduler::isAvailable
}
// Give lower priority to LSUnit stall events.
- switch (LSU->isAvailable(IR)) {
+ switch (LSU.isAvailable(IR)) {
case LSUnit::LSU_LQUEUE_FULL:
return Scheduler::SC_LOAD_QUEUE_FULL;
case LSUnit::LSU_SQUEUE_FULL:
@@ -80,7 +80,7 @@ void Scheduler::issueInstructionImpl(
if (IS->isExecuting())
IssuedSet.emplace_back(IR);
else if (IS->isExecuted())
- LSU->onInstructionExecuted(IR);
+ LSU.onInstructionExecuted(IR);
}
// Release the buffered resources and issue the instruction.
@@ -170,7 +170,7 @@ void Scheduler::updateIssuedSet(SmallVec
}
// Instruction IR has completed execution.
- LSU->onInstructionExecuted(IR);
+ LSU.onInstructionExecuted(IR);
Executed.emplace_back(IR);
++RemovedElements;
IR.invalidate();
@@ -213,7 +213,7 @@ void Scheduler::dispatch(const InstRef &
// If necessary, reserve queue entries in the load-store unit (LSU).
bool IsMemOp = Desc.MayLoad || Desc.MayStore;
if (IsMemOp)
- LSU->dispatch(IR);
+ LSU.dispatch(IR);
if (!isReady(IR)) {
LLVM_DEBUG(dbgs() << "[SCHEDULER] Adding #" << IR << " to the WaitSet\n");
@@ -238,7 +238,7 @@ void Scheduler::dispatch(const InstRef &
bool Scheduler::isReady(const InstRef &IR) const {
const InstrDesc &Desc = IR.getInstruction()->getDesc();
bool IsMemOp = Desc.MayLoad || Desc.MayStore;
- return IR.getInstruction()->isReady() && (!IsMemOp || LSU->isReady(IR));
+ return IR.getInstruction()->isReady() && (!IsMemOp || LSU.isReady(IR));
}
} // namespace mca