[llvm] [RFC][AMDGPU] Add vulkan:private/nonprivate MMRAs support (PR #78573)

Sameer Sahasrabuddhe via llvm-commits llvm-commits at lists.llvm.org
Sun Jun 2 22:16:36 PDT 2024


================
@@ -679,15 +713,39 @@ class SIMemoryLegalizer final : public MachineFunctionPass {
   bool runOnMachineFunction(MachineFunction &MF) override;
 };
 
-} // end namespace anonymous
-
-void SIMemOpAccess::reportUnsupported(const MachineBasicBlock::iterator &MI,
-                                      const char *Msg) const {
+void reportUnsupported(const MachineBasicBlock::iterator &MI, const char *Msg,
+                       bool Warn = false) {
   const Function &Func = MI->getParent()->getParent()->getFunction();
-  DiagnosticInfoUnsupported Diag(Func, Msg, MI->getDebugLoc());
+  DiagnosticInfoUnsupported Diag(Func, Msg, MI->getDebugLoc(),
+                                 Warn ? DS_Warning : DS_Error);
   Func.getContext().diagnose(Diag);
 }
 
+} // namespace
+
+VulkanOpKind SIMemOpAccess::getVulkanOpKind(const MachineInstr &MI) const {
+  VulkanOpKind VKOK = VulkanOpKind::None;
+
+  if (auto MMRA = MMRAMetadata(MI.getMMRAMetadata())) {
+    if (MMRA.hasTag("vulkan", "private"))
+      return VulkanOpKind::Private;
+    else if (MMRA.hasTag("vulkan", "nonprivate"))
+      return VulkanOpKind::NonPrivate;
+  }
+
+  // Don't complain if amdgcn-force-vulkan-memorymodel=0; that way it can be
+  // used as an escape hatch to force-ignore vulkan MMRAs.
+  if (!HasVulkanMM && VKOK != VulkanOpKind::None &&
+      ForceVulkanMM != cl::BOU_FALSE) {
+    report_fatal_error(
+        "vulkan:private/nonprivate annotations can only be honored if the "
+        "Vulkan memory model is enabled. Strip vulkan MMRAs or disable the "
+        "Vulkan memory model using -amdgcn-force-vulkan-memorymodel=0");
+  }
----------------
ssahasra wrote:

Maybe a better cl::opt would be "-amdgcn-enable-experimental-mmra"
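
For illustration, a minimal sketch of what such an option could look like,
using the standard cl::opt machinery from llvm/Support/CommandLine.h. The
flag name, description, and default below are assumptions for the sake of
the example, not what the patch currently implements:

    // Hypothetical opt-in flag for experimental MMRA handling; the name and
    // default here are illustrative only.
    static cl::opt<bool> EnableExperimentalMMRA(
        "amdgcn-enable-experimental-mmra",
        cl::desc("Honor experimental MMRA annotations such as "
                 "vulkan:private/nonprivate"),
        cl::init(false), cl::Hidden);

An opt-in flag along these lines would let the legalizer diagnose vulkan
MMRAs whenever the flag is off, rather than keying the escape hatch off the
force-vulkan-memorymodel override.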

https://github.com/llvm/llvm-project/pull/78573

