[flang-commits] [flang] [Flang][MLIR] Extend DataLayout utilities to have basic GPU Module support (PR #123149)
via flang-commits
flang-commits at lists.llvm.org
Wed Jan 29 09:59:49 PST 2025
https://github.com/agozillon updated https://github.com/llvm/llvm-project/pull/123149
>From 7a846c703fa576a686583785f051b7fe4e7d3ce2 Mon Sep 17 00:00:00 2001
From: agozillon <Andrew.Gozillon at amd.com>
Date: Wed, 15 Jan 2025 19:54:57 -0600
Subject: [PATCH] [Flang][MLIR] Extend DataLayout utilities to have basic GPU
Module support
There are now areas where we may encounter either a ModuleOp or a GPUModuleOp,
and both of these module operations can carry DataLayouts. Since we may need to
use the DataLayout utilities in these areas, this patch extends those utilities
so they can be used with both module types.
Those with more knowledge of how GPUModuleOps should interact with their
parent ModuleOp's DataLayout may wish to make further alterations in the
future. For the moment, this simply uses the basic data layout construction,
which I believe combines the parent and child data layouts from the ModuleOp
and GPUModuleOp. If the GPUModuleOp has no DataLayout of its own, it should
default to the parent ModuleOp's.
One caveat worth noting: if two module operations both define builtin dialect
DataLayout entries, the DataLayout combination functionality does not appear
to support merging them.
This behaviour is useful for areas like: https://github.com/llvm/llvm-project/pull/119585/files#diff-19fc4bcb38829d085e25d601d344bbd85bf7ef749ca359e348f4a7c750eae89dR1412 where we
have a crossroads between the two different module operations.
---
.../flang/Optimizer/Support/DataLayout.h | 19 +++++-
flang/lib/Optimizer/CodeGen/CodeGen.cpp | 4 +-
flang/lib/Optimizer/CodeGen/TargetRewrite.cpp | 2 +-
flang/lib/Optimizer/Support/DataLayout.cpp | 65 +++++++++++++++----
.../lib/Optimizer/Transforms/AddDebugInfo.cpp | 2 +-
.../Transforms/CUFAddConstructor.cpp | 2 +-
.../Transforms/CUFGPUToLLVMConversion.cpp | 4 +-
.../Optimizer/Transforms/CUFOpConversion.cpp | 4 +-
.../Optimizer/Transforms/LoopVersioning.cpp | 4 +-
.../lib/OpenACC/TestOpenACCInterfaces.cpp | 2 +-
10 files changed, 79 insertions(+), 29 deletions(-)
diff --git a/flang/include/flang/Optimizer/Support/DataLayout.h b/flang/include/flang/Optimizer/Support/DataLayout.h
index 6072425b7d637f..957ea99162c5bb 100644
--- a/flang/include/flang/Optimizer/Support/DataLayout.h
+++ b/flang/include/flang/Optimizer/Support/DataLayout.h
@@ -18,10 +18,14 @@
namespace mlir {
class ModuleOp;
-}
+namespace gpu {
+class GPUModuleOp;
+} // namespace gpu
+} // namespace mlir
+
namespace llvm {
class DataLayout;
-}
+} // namespace llvm
namespace fir::support {
/// Create an mlir::DataLayoutSpecInterface attribute from an llvm::DataLayout
@@ -30,6 +34,8 @@ namespace fir::support {
/// the llvm::DataLayout on the module.
/// These attributes are replaced if they were already set.
void setMLIRDataLayout(mlir::ModuleOp mlirModule, const llvm::DataLayout &dl);
+void setMLIRDataLayout(mlir::gpu::GPUModuleOp mlirModule,
+ const llvm::DataLayout &dl);
/// Create an mlir::DataLayoutSpecInterface from the llvm.data_layout attribute
/// if one is provided. If such attribute is not available, create a default
@@ -37,6 +43,8 @@ void setMLIRDataLayout(mlir::ModuleOp mlirModule, const llvm::DataLayout &dl);
/// nothing.
void setMLIRDataLayoutFromAttributes(mlir::ModuleOp mlirModule,
bool allowDefaultLayout);
+void setMLIRDataLayoutFromAttributes(mlir::gpu::GPUModuleOp mlirModule,
+ bool allowDefaultLayout);
/// Create mlir::DataLayout from the data layout information on the
/// mlir::Module. Creates the data layout information attributes with
@@ -44,7 +52,12 @@ void setMLIRDataLayoutFromAttributes(mlir::ModuleOp mlirModule,
/// information is present at all and \p allowDefaultLayout is false, returns
/// std::nullopt.
std::optional<mlir::DataLayout>
-getOrSetDataLayout(mlir::ModuleOp mlirModule, bool allowDefaultLayout = false);
+getOrSetMLIRDataLayout(mlir::ModuleOp mlirModule,
+ bool allowDefaultLayout = false);
+std::optional<mlir::DataLayout>
+getOrSetMLIRDataLayout(mlir::gpu::GPUModuleOp mlirModule,
+ bool allowDefaultLayout = false);
+
} // namespace fir::support
#endif // FORTRAN_OPTIMIZER_SUPPORT_DATALAYOUT_H
diff --git a/flang/lib/Optimizer/CodeGen/CodeGen.cpp b/flang/lib/Optimizer/CodeGen/CodeGen.cpp
index 1b078be7bb1c75..cb4eb8303a4959 100644
--- a/flang/lib/Optimizer/CodeGen/CodeGen.cpp
+++ b/flang/lib/Optimizer/CodeGen/CodeGen.cpp
@@ -1190,7 +1190,7 @@ genCUFAllocDescriptor(mlir::Location loc,
mlir::ModuleOp mod, fir::BaseBoxType boxTy,
const fir::LLVMTypeConverter &typeConverter) {
std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(mod, /*allowDefaultLayout=*/true);
+ fir::support::getOrSetMLIRDataLayout(mod, /*allowDefaultLayout=*/true);
if (!dl)
mlir::emitError(mod.getLoc(),
"module operation must carry a data layout attribute "
@@ -3942,7 +3942,7 @@ class FIRToLLVMLowering
return signalPassFailure();
std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(mod, /*allowDefaultLayout=*/true);
+ fir::support::getOrSetMLIRDataLayout(mod, /*allowDefaultLayout=*/true);
if (!dl) {
mlir::emitError(mod.getLoc(),
"module operation must carry a data layout attribute "
diff --git a/flang/lib/Optimizer/CodeGen/TargetRewrite.cpp b/flang/lib/Optimizer/CodeGen/TargetRewrite.cpp
index b0b9499557e2b7..cc6c2b7df825a9 100644
--- a/flang/lib/Optimizer/CodeGen/TargetRewrite.cpp
+++ b/flang/lib/Optimizer/CodeGen/TargetRewrite.cpp
@@ -107,7 +107,7 @@ class TargetRewrite : public fir::impl::TargetRewritePassBase<TargetRewrite> {
// TargetRewrite will require querying the type storage sizes, if it was
// not set already, create a DataLayoutSpec for the ModuleOp now.
std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(mod, /*allowDefaultLayout=*/true);
+ fir::support::getOrSetMLIRDataLayout(mod, /*allowDefaultLayout=*/true);
if (!dl) {
mlir::emitError(mod.getLoc(),
"module operation must carry a data layout attribute "
diff --git a/flang/lib/Optimizer/Support/DataLayout.cpp b/flang/lib/Optimizer/Support/DataLayout.cpp
index 93a3b92d081050..f89ce5984b9198 100644
--- a/flang/lib/Optimizer/Support/DataLayout.cpp
+++ b/flang/lib/Optimizer/Support/DataLayout.cpp
@@ -10,6 +10,7 @@
#include "flang/Optimizer/Dialect/Support/FIRContext.h"
#include "flang/Optimizer/Support/FatalError.h"
#include "mlir/Dialect/DLTI/DLTI.h"
+#include "mlir/Dialect/GPU/IR/GPUDialect.h"
#include "mlir/Dialect/LLVMIR/LLVMDialect.h"
#include "mlir/IR/BuiltinOps.h"
#include "mlir/Interfaces/DataLayoutInterfaces.h"
@@ -20,8 +21,9 @@
#include "llvm/Support/TargetSelect.h"
#include "llvm/Target/TargetMachine.h"
-void fir::support::setMLIRDataLayout(mlir::ModuleOp mlirModule,
- const llvm::DataLayout &dl) {
+namespace {
+template <typename ModOpTy>
+static void setDataLayout(ModOpTy mlirModule, const llvm::DataLayout &dl) {
mlir::MLIRContext *context = mlirModule.getContext();
mlirModule->setAttr(
mlir::LLVM::LLVMDialect::getDataLayoutAttrName(),
@@ -30,12 +32,14 @@ void fir::support::setMLIRDataLayout(mlir::ModuleOp mlirModule,
mlirModule->setAttr(mlir::DLTIDialect::kDataLayoutAttrName, dlSpec);
}
-void fir::support::setMLIRDataLayoutFromAttributes(mlir::ModuleOp mlirModule,
- bool allowDefaultLayout) {
+template <typename ModOpTy>
+static void setDataLayoutFromAttributes(ModOpTy mlirModule,
+ bool allowDefaultLayout) {
if (mlirModule.getDataLayoutSpec())
return; // Already set.
- if (auto dataLayoutString = mlirModule->getAttrOfType<mlir::StringAttr>(
- mlir::LLVM::LLVMDialect::getDataLayoutAttrName())) {
+ if (auto dataLayoutString =
+ mlirModule->template getAttrOfType<mlir::StringAttr>(
+ mlir::LLVM::LLVMDialect::getDataLayoutAttrName())) {
llvm::DataLayout llvmDataLayout(dataLayoutString);
fir::support::setMLIRDataLayout(mlirModule, llvmDataLayout);
return;
@@ -46,15 +50,48 @@ void fir::support::setMLIRDataLayoutFromAttributes(mlir::ModuleOp mlirModule,
fir::support::setMLIRDataLayout(mlirModule, llvmDataLayout);
}
-std::optional<mlir::DataLayout>
-fir::support::getOrSetDataLayout(mlir::ModuleOp mlirModule,
- bool allowDefaultLayout) {
- if (!mlirModule.getDataLayoutSpec()) {
+template <typename ModOpTy>
+static std::optional<mlir::DataLayout>
+getOrSetDataLayout(ModOpTy mlirModule, bool allowDefaultLayout) {
+ if (!mlirModule.getDataLayoutSpec())
fir::support::setMLIRDataLayoutFromAttributes(mlirModule,
allowDefaultLayout);
- if (!mlirModule.getDataLayoutSpec()) {
- return std::nullopt;
- }
- }
+ if (!mlirModule.getDataLayoutSpec() &&
+ !mlir::isa<mlir::gpu::GPUModuleOp>(mlirModule))
+ return std::nullopt;
return mlir::DataLayout(mlirModule);
}
+
+} // namespace
+
+void fir::support::setMLIRDataLayout(mlir::ModuleOp mlirModule,
+ const llvm::DataLayout &dl) {
+ setDataLayout(mlirModule, dl);
+}
+
+void fir::support::setMLIRDataLayout(mlir::gpu::GPUModuleOp mlirModule,
+ const llvm::DataLayout &dl) {
+ setDataLayout(mlirModule, dl);
+}
+
+void fir::support::setMLIRDataLayoutFromAttributes(mlir::ModuleOp mlirModule,
+ bool allowDefaultLayout) {
+ setDataLayoutFromAttributes(mlirModule, allowDefaultLayout);
+}
+
+void fir::support::setMLIRDataLayoutFromAttributes(
+ mlir::gpu::GPUModuleOp mlirModule, bool allowDefaultLayout) {
+ setDataLayoutFromAttributes(mlirModule, allowDefaultLayout);
+}
+
+std::optional<mlir::DataLayout>
+fir::support::getOrSetMLIRDataLayout(mlir::ModuleOp mlirModule,
+ bool allowDefaultLayout) {
+ return getOrSetDataLayout(mlirModule, allowDefaultLayout);
+}
+
+std::optional<mlir::DataLayout>
+fir::support::getOrSetMLIRDataLayout(mlir::gpu::GPUModuleOp mlirModule,
+ bool allowDefaultLayout) {
+ return getOrSetDataLayout(mlirModule, allowDefaultLayout);
+}
diff --git a/flang/lib/Optimizer/Transforms/AddDebugInfo.cpp b/flang/lib/Optimizer/Transforms/AddDebugInfo.cpp
index 16404fcda57b48..3e1209c9ccd547 100644
--- a/flang/lib/Optimizer/Transforms/AddDebugInfo.cpp
+++ b/flang/lib/Optimizer/Transforms/AddDebugInfo.cpp
@@ -523,7 +523,7 @@ void AddDebugInfoPass::runOnOperation() {
llvm::StringRef fileName;
std::string filePath;
std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(module, /*allowDefaultLayout=*/true);
+ fir::support::getOrSetMLIRDataLayout(module, /*allowDefaultLayout=*/true);
if (!dl) {
mlir::emitError(module.getLoc(), "Missing data layout attribute in module");
signalPassFailure();
diff --git a/flang/lib/Optimizer/Transforms/CUFAddConstructor.cpp b/flang/lib/Optimizer/Transforms/CUFAddConstructor.cpp
index 97551595db039c..43ef6822de3028 100644
--- a/flang/lib/Optimizer/Transforms/CUFAddConstructor.cpp
+++ b/flang/lib/Optimizer/Transforms/CUFAddConstructor.cpp
@@ -57,7 +57,7 @@ struct CUFAddConstructor
auto funcTy =
mlir::LLVM::LLVMFunctionType::get(voidTy, {}, /*isVarArg=*/false);
std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(mod, /*allowDefaultLayout=*/false);
+ fir::support::getOrSetMLIRDataLayout(mod, /*allowDefaultLayout=*/false);
if (!dl) {
mlir::emitError(mod.getLoc(),
"data layout attribute is required to perform " +
diff --git a/flang/lib/Optimizer/Transforms/CUFGPUToLLVMConversion.cpp b/flang/lib/Optimizer/Transforms/CUFGPUToLLVMConversion.cpp
index c469b5a95b0447..4611a18a541bcc 100644
--- a/flang/lib/Optimizer/Transforms/CUFGPUToLLVMConversion.cpp
+++ b/flang/lib/Optimizer/Transforms/CUFGPUToLLVMConversion.cpp
@@ -188,8 +188,8 @@ class CUFGPUToLLVMConversion
if (!module)
return signalPassFailure();
- std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(module, /*allowDefaultLayout=*/false);
+ std::optional<mlir::DataLayout> dl = fir::support::getOrSetMLIRDataLayout(
+ module, /*allowDefaultLayout=*/false);
fir::LLVMTypeConverter typeConverter(module, /*applyTBAA=*/false,
/*forceUnifiedTBAATree=*/false, *dl);
cuf::populateCUFGPUToLLVMConversionPatterns(typeConverter, patterns);
diff --git a/flang/lib/Optimizer/Transforms/CUFOpConversion.cpp b/flang/lib/Optimizer/Transforms/CUFOpConversion.cpp
index cc525d703ae57f..ede409d790e2b3 100644
--- a/flang/lib/Optimizer/Transforms/CUFOpConversion.cpp
+++ b/flang/lib/Optimizer/Transforms/CUFOpConversion.cpp
@@ -913,8 +913,8 @@ class CUFOpConversion : public fir::impl::CUFOpConversionBase<CUFOpConversion> {
return signalPassFailure();
mlir::SymbolTable symtab(module);
- std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(module, /*allowDefaultLayout=*/false);
+ std::optional<mlir::DataLayout> dl = fir::support::getOrSetMLIRDataLayout(
+ module, /*allowDefaultLayout=*/false);
fir::LLVMTypeConverter typeConverter(module, /*applyTBAA=*/false,
/*forceUnifiedTBAATree=*/false, *dl);
target.addLegalDialect<fir::FIROpsDialect, mlir::arith::ArithDialect,
diff --git a/flang/lib/Optimizer/Transforms/LoopVersioning.cpp b/flang/lib/Optimizer/Transforms/LoopVersioning.cpp
index b534ec160ce215..0aa89f1f5471e4 100644
--- a/flang/lib/Optimizer/Transforms/LoopVersioning.cpp
+++ b/flang/lib/Optimizer/Transforms/LoopVersioning.cpp
@@ -312,8 +312,8 @@ void LoopVersioningPass::runOnOperation() {
mlir::ModuleOp module = func->getParentOfType<mlir::ModuleOp>();
fir::KindMapping kindMap = fir::getKindMapping(module);
mlir::SmallVector<ArgInfo, 4> argsOfInterest;
- std::optional<mlir::DataLayout> dl =
- fir::support::getOrSetDataLayout(module, /*allowDefaultLayout=*/false);
+ std::optional<mlir::DataLayout> dl = fir::support::getOrSetMLIRDataLayout(
+ module, /*allowDefaultLayout=*/false);
if (!dl)
mlir::emitError(module.getLoc(),
"data layout attribute is required to perform " DEBUG_TYPE
diff --git a/flang/test/lib/OpenACC/TestOpenACCInterfaces.cpp b/flang/test/lib/OpenACC/TestOpenACCInterfaces.cpp
index a01396748f4c2c..5c14809a265e1c 100644
--- a/flang/test/lib/OpenACC/TestOpenACCInterfaces.cpp
+++ b/flang/test/lib/OpenACC/TestOpenACCInterfaces.cpp
@@ -28,7 +28,7 @@ struct TestFIROpenACCInterfaces
void runOnOperation() override {
mlir::ModuleOp mod = getOperation();
auto datalayout =
- fir::support::getOrSetDataLayout(mod, /*allowDefaultLayout=*/true);
+ fir::support::getOrSetMLIRDataLayout(mod, /*allowDefaultLayout=*/true);
mlir::OpBuilder builder(mod);
getOperation().walk([&](Operation *op) {
if (isa<ACC_DATA_ENTRY_OPS>(op)) {
More information about the flang-commits
mailing list