[Mlir-commits] [mlir] [mlir][NFC] update `mlir/examples` create APIs (31/n) (PR #150652)
llvmlistbot at llvm.org
Fri Jul 25 10:03:27 PDT 2025
llvmbot wrote:
@llvm/pr-subscribers-mlir
Author: Maksim Levental (makslevental)
Changes:
See https://github.com/llvm/llvm-project/pull/147168 for more info.
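For context, a minimal before/after sketch of the create-API change that this patch applies mechanically across the examples; `buildSum` and the use of `arith::AddIOp` are illustrative stand-ins, not code taken from the patch:

```c++
#include "mlir/Dialect/Arith/IR/Arith.h"
#include "mlir/IR/Builders.h"

// Sketch only: shows the construction pattern migrated throughout this diff.
static mlir::Value buildSum(mlir::OpBuilder &builder, mlir::Location loc,
                            mlir::Value lhs, mlir::Value rhs) {
  // Before: construction goes through the templated OpBuilder method.
  //   return builder.create<mlir::arith::AddIOp>(loc, lhs, rhs);
  // After: the op class exposes a static `create` taking the builder as its
  // first argument; the remaining arguments are unchanged.
  return mlir::arith::AddIOp::create(builder, loc, lhs, rhs);
}
```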
---
Patch is 69.08 KiB, truncated to 20.00 KiB below, full version: https://github.com/llvm/llvm-project/pull/150652.diff
29 Files Affected:
- (modified) mlir/docs/Interfaces.md (+2-2)
- (modified) mlir/docs/PDLL.md (+6-6)
- (modified) mlir/docs/Tutorials/QuickstartRewrites.md (+3-3)
- (modified) mlir/docs/Tutorials/Toy/Ch-2.md (+1-1)
- (modified) mlir/docs/Tutorials/Toy/Ch-4.md (+1-1)
- (modified) mlir/docs/Tutorials/Toy/Ch-5.md (+1-1)
- (modified) mlir/docs/Tutorials/Toy/Ch-6.md (+1-1)
- (modified) mlir/docs/Tutorials/Toy/Ch-7.md (+2-2)
- (modified) mlir/examples/toy/Ch2/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch2/mlir/MLIRGen.cpp (+14-14)
- (modified) mlir/examples/toy/Ch3/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch3/mlir/MLIRGen.cpp (+14-14)
- (modified) mlir/examples/toy/Ch4/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch4/mlir/Dialect.cpp (+5-3)
- (modified) mlir/examples/toy/Ch4/mlir/MLIRGen.cpp (+14-14)
- (modified) mlir/examples/toy/Ch5/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch5/mlir/Dialect.cpp (+5-3)
- (modified) mlir/examples/toy/Ch5/mlir/LowerToAffineLoops.cpp (+19-19)
- (modified) mlir/examples/toy/Ch5/mlir/MLIRGen.cpp (+14-14)
- (modified) mlir/examples/toy/Ch6/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch6/mlir/Dialect.cpp (+5-3)
- (modified) mlir/examples/toy/Ch6/mlir/LowerToAffineLoops.cpp (+19-19)
- (modified) mlir/examples/toy/Ch6/mlir/LowerToLLVM.cpp (+22-23)
- (modified) mlir/examples/toy/Ch6/mlir/MLIRGen.cpp (+14-14)
- (modified) mlir/examples/toy/Ch7/include/toy/Ops.td (+1-1)
- (modified) mlir/examples/toy/Ch7/mlir/Dialect.cpp (+7-6)
- (modified) mlir/examples/toy/Ch7/mlir/LowerToAffineLoops.cpp (+19-19)
- (modified) mlir/examples/toy/Ch7/mlir/LowerToLLVM.cpp (+22-23)
- (modified) mlir/examples/toy/Ch7/mlir/MLIRGen.cpp (+19-17)
``````````diff
diff --git a/mlir/docs/Interfaces.md b/mlir/docs/Interfaces.md
index bf590ac3351ee..7e1c5fe075675 100644
--- a/mlir/docs/Interfaces.md
+++ b/mlir/docs/Interfaces.md
@@ -563,7 +563,7 @@ def MyInterface : OpInterface<"MyInterface"> {
template <typename ConcreteOp>
struct Model : public Concept {
Operation *create(OpBuilder &builder, Location loc) const override {
- return builder.create<ConcreteOp>(loc);
+ return ConcreteOp::create(builder, loc);
}
}
};
@@ -574,7 +574,7 @@ def MyInterface : OpInterface<"MyInterface"> {
}],
"Operation *", "create", (ins "OpBuilder &":$builder, "Location":$loc),
/*methodBody=*/[{
- return builder.create<ConcreteOp>(loc);
+ return ConcreteOp::create(builder, loc);
}]>,
InterfaceMethod<[{
diff --git a/mlir/docs/PDLL.md b/mlir/docs/PDLL.md
index 9839d1d0df764..c6e352fd647da 100644
--- a/mlir/docs/PDLL.md
+++ b/mlir/docs/PDLL.md
@@ -1483,7 +1483,7 @@ be defined by specifying a string code block after the rewrite declaration:
```pdll
Rewrite BuildOp(value: Value) -> (foo: Op<my_dialect.foo>, bar: Op<my_dialect.bar>) [{
- return {rewriter.create<my_dialect::FooOp>(value), rewriter.create<my_dialect::BarOp>()};
+ return {my_dialect::FooOp::create(rewriter, value), my_dialect::BarOp::create(rewriter)};
}];
Pattern {
@@ -1508,7 +1508,7 @@ translated into:
```c++
std::tuple<my_dialect::FooOp, my_dialect::BarOp> BuildOp(Value value) {
- return {rewriter.create<my_dialect::FooOp>(value), rewriter.create<my_dialect::BarOp>()};
+ return {my_dialect::FooOp::create(rewriter, value), my_dialect::BarOp::create(rewriter)};
}
```
@@ -1530,7 +1530,7 @@ below describes the various result translation scenarios:
```pdll
Rewrite createOp() [{
- rewriter.create<my_dialect::FooOp>();
+ my_dialect::FooOp::create(rewriter);
}];
```
@@ -1538,7 +1538,7 @@ In the case where a native `Rewrite` has no results, the native function returns
```c++
void createOp(PatternRewriter &rewriter) {
- rewriter.create<my_dialect::FooOp>();
+ my_dialect::FooOp::create(rewriter);
}
```
@@ -1546,7 +1546,7 @@ void createOp(PatternRewriter &rewriter) {
```pdll
Rewrite createOp() -> Op<my_dialect.foo> [{
- return rewriter.create<my_dialect::FooOp>();
+ return my_dialect::FooOp::create(rewriter);
}];
```
@@ -1555,7 +1555,7 @@ native type for that single result:
```c++
my_dialect::FooOp createOp(PatternRewriter &rewriter) {
- return rewriter.create<my_dialect::FooOp>();
+ return my_dialect::FooOp::create(rewriter);
}
```
diff --git a/mlir/docs/Tutorials/QuickstartRewrites.md b/mlir/docs/Tutorials/QuickstartRewrites.md
index 0c890659b0eea..cbb6f03e93e65 100644
--- a/mlir/docs/Tutorials/QuickstartRewrites.md
+++ b/mlir/docs/Tutorials/QuickstartRewrites.md
@@ -130,7 +130,7 @@ def : Pat<(TF_LeakyReluOp:$old_value, $arg, F32Attr:$a),
```c++
static Value createTFLLeakyRelu(PatternRewriter &rewriter, Operation *op,
Value operand, Attribute attr) {
- return rewriter.create<mlir::TFL::LeakyReluOp>(
+ return mlir::TFL::LeakyReluOp::create(rewriter,
op->getLoc(), operands[0].getType(), /*arg=*/operands[0],
/*alpha=*/cast<FloatAttr>(attrs[0]));
}
@@ -194,10 +194,10 @@ LogicalResult circt::MulOp::canonicalize(MulOp op, PatternRewriter &rewriter) {
// mul(x, c) -> shl(x, log2(c)), where c is a power of two.
if (inputs.size() == 2 && matchPattern(inputs.back(), m_RConstant(value)) &&
value.isPowerOf2()) {
- auto shift = rewriter.create<rtl::ConstantOp>(op.getLoc(), op.getType(),
+ auto shift = rtl::ConstantOp::create(rewriter, op.getLoc(), op.getType(),
value.exactLogBase2());
auto shlOp =
- rewriter.create<comb::ShlOp>(op.getLoc(), inputs[0], shift);
+ comb::ShlOp::create(rewriter, op.getLoc(), inputs[0], shift);
rewriter.replaceOpWithNewOp<MulOp>(op, op.getType(),
ArrayRef<Value>(shlOp));
return success();
diff --git a/mlir/docs/Tutorials/Toy/Ch-2.md b/mlir/docs/Tutorials/Toy/Ch-2.md
index 039417c9c9a19..81e41615ee55d 100644
--- a/mlir/docs/Tutorials/Toy/Ch-2.md
+++ b/mlir/docs/Tutorials/Toy/Ch-2.md
@@ -521,7 +521,7 @@ def ConstantOp : Toy_Op<"constant"> {
// Add custom build methods for the constant operation. These methods populate
// the `state` that MLIR uses to create operations, i.e. these are used when
- // using `builder.create<ConstantOp>(...)`.
+ // using `ConstantOp::create(builder, ...)`.
let builders = [
// Build a constant with a given constant tensor value.
OpBuilder<(ins "DenseElementsAttr":$value), [{
diff --git a/mlir/docs/Tutorials/Toy/Ch-4.md b/mlir/docs/Tutorials/Toy/Ch-4.md
index 1275d36de3531..1bba269ca9a1e 100644
--- a/mlir/docs/Tutorials/Toy/Ch-4.md
+++ b/mlir/docs/Tutorials/Toy/Ch-4.md
@@ -300,7 +300,7 @@ struct ToyInlinerInterface : public DialectInlinerInterface {
Operation *materializeCallConversion(OpBuilder &builder, Value input,
Type resultType,
Location conversionLoc) const final {
- return builder.create<CastOp>(conversionLoc, resultType, input);
+ return CastOp::create(builder, conversionLoc, resultType, input);
}
};
```
diff --git a/mlir/docs/Tutorials/Toy/Ch-5.md b/mlir/docs/Tutorials/Toy/Ch-5.md
index d483cd8bba21d..c750c07ddfc04 100644
--- a/mlir/docs/Tutorials/Toy/Ch-5.md
+++ b/mlir/docs/Tutorials/Toy/Ch-5.md
@@ -136,7 +136,7 @@ struct TransposeOpLowering : public mlir::ConversionPattern {
// Transpose the elements by generating a load from the reverse
// indices.
SmallVector<mlir::Value, 2> reverseIvs(llvm::reverse(loopIvs));
- return rewriter.create<mlir::AffineLoadOp>(loc, input, reverseIvs);
+ return mlir::AffineLoadOp::create(rewriter, loc, input, reverseIvs);
});
return success();
}
diff --git a/mlir/docs/Tutorials/Toy/Ch-6.md b/mlir/docs/Tutorials/Toy/Ch-6.md
index e8a68b5f9ee38..529de55304206 100644
--- a/mlir/docs/Tutorials/Toy/Ch-6.md
+++ b/mlir/docs/Tutorials/Toy/Ch-6.md
@@ -47,7 +47,7 @@ static FlatSymbolRefAttr getOrInsertPrintf(PatternRewriter &rewriter,
// Insert the printf function into the body of the parent module.
PatternRewriter::InsertionGuard insertGuard(rewriter);
rewriter.setInsertionPointToStart(module.getBody());
- rewriter.create<LLVM::LLVMFuncOp>(module.getLoc(), "printf", llvmFnType);
+ LLVM::LLVMFuncOp::create(rewriter, module.getLoc(), "printf", llvmFnType);
return SymbolRefAttr::get("printf", context);
}
```
diff --git a/mlir/docs/Tutorials/Toy/Ch-7.md b/mlir/docs/Tutorials/Toy/Ch-7.md
index dce3490aeace4..0f50c49a5f64d 100644
--- a/mlir/docs/Tutorials/Toy/Ch-7.md
+++ b/mlir/docs/Tutorials/Toy/Ch-7.md
@@ -488,9 +488,9 @@ mlir::Operation *ToyDialect::materializeConstant(mlir::OpBuilder &builder,
mlir::Type type,
mlir::Location loc) {
if (isa<StructType>(type))
- return builder.create<StructConstantOp>(loc, type,
+ return StructConstantOp::create(builder, loc, type,
cast<mlir::ArrayAttr>(value));
- return builder.create<ConstantOp>(loc, type,
+ return ConstantOp::create(builder, loc, type,
cast<mlir::DenseElementsAttr>(value));
}
```
diff --git a/mlir/examples/toy/Ch2/include/toy/Ops.td b/mlir/examples/toy/Ch2/include/toy/Ops.td
index ef65c9c8d682b..91bf83a54df1a 100644
--- a/mlir/examples/toy/Ch2/include/toy/Ops.td
+++ b/mlir/examples/toy/Ch2/include/toy/Ops.td
@@ -70,7 +70,7 @@ def ConstantOp : Toy_Op<"constant", [Pure]> {
// Add custom build methods for the constant operation. These method populates
// the `state` that MLIR uses to create operations, i.e. these are used when
- // using `builder.create<ConstantOp>(...)`.
+ // using `ConstantOp::create(builder, ...)`.
let builders = [
// Build a constant with a given constant tensor value.
OpBuilder<(ins "DenseElementsAttr":$value), [{
diff --git a/mlir/examples/toy/Ch2/mlir/MLIRGen.cpp b/mlir/examples/toy/Ch2/mlir/MLIRGen.cpp
index 96925bebf07b7..39ae6a016eb41 100644
--- a/mlir/examples/toy/Ch2/mlir/MLIRGen.cpp
+++ b/mlir/examples/toy/Ch2/mlir/MLIRGen.cpp
@@ -121,8 +121,8 @@ class MLIRGenImpl {
llvm::SmallVector<mlir::Type, 4> argTypes(proto.getArgs().size(),
getType(VarType{}));
auto funcType = builder.getFunctionType(argTypes, {});
- return builder.create<mlir::toy::FuncOp>(location, proto.getName(),
- funcType);
+ return mlir::toy::FuncOp::create(builder, location, proto.getName(),
+ funcType);
}
/// Emit a new function and add it to the MLIR module.
@@ -166,7 +166,7 @@ class MLIRGenImpl {
if (!entryBlock.empty())
returnOp = dyn_cast<ReturnOp>(entryBlock.back());
if (!returnOp) {
- builder.create<ReturnOp>(loc(funcAST.getProto()->loc()));
+ ReturnOp::create(builder, loc(funcAST.getProto()->loc()));
} else if (returnOp.hasOperand()) {
// Otherwise, if this return operation has an operand then add a result to
// the function.
@@ -202,9 +202,9 @@ class MLIRGenImpl {
// support '+' and '*'.
switch (binop.getOp()) {
case '+':
- return builder.create<AddOp>(location, lhs, rhs);
+ return AddOp::create(builder, location, lhs, rhs);
case '*':
- return builder.create<MulOp>(location, lhs, rhs);
+ return MulOp::create(builder, location, lhs, rhs);
}
emitError(location, "invalid binary operator '") << binop.getOp() << "'";
@@ -235,8 +235,8 @@ class MLIRGenImpl {
}
// Otherwise, this return operation has zero operands.
- builder.create<ReturnOp>(location,
- expr ? ArrayRef(expr) : ArrayRef<mlir::Value>());
+ ReturnOp::create(builder, location,
+ expr ? ArrayRef(expr) : ArrayRef<mlir::Value>());
return mlir::success();
}
@@ -280,7 +280,7 @@ class MLIRGenImpl {
// Build the MLIR op `toy.constant`. This invokes the `ConstantOp::build`
// method.
- return builder.create<ConstantOp>(loc(lit.loc()), type, dataAttribute);
+ return ConstantOp::create(builder, loc(lit.loc()), type, dataAttribute);
}
/// Recursive helper function to accumulate the data that compose an array
@@ -325,13 +325,13 @@ class MLIRGenImpl {
"does not accept multiple arguments");
return nullptr;
}
- return builder.create<TransposeOp>(location, operands[0]);
+ return TransposeOp::create(builder, location, operands[0]);
}
// Otherwise this is a call to a user-defined function. Calls to
// user-defined functions are mapped to a custom call that takes the callee
// name as an attribute.
- return builder.create<GenericCallOp>(location, callee, operands);
+ return GenericCallOp::create(builder, location, callee, operands);
}
/// Emit a print expression. It emits specific operations for two builtins:
@@ -341,13 +341,13 @@ class MLIRGenImpl {
if (!arg)
return mlir::failure();
- builder.create<PrintOp>(loc(call.loc()), arg);
+ PrintOp::create(builder, loc(call.loc()), arg);
return mlir::success();
}
/// Emit a constant for a single number (FIXME: semantic? broadcast?)
mlir::Value mlirGen(NumberExprAST &num) {
- return builder.create<ConstantOp>(loc(num.loc()), num.getValue());
+ return ConstantOp::create(builder, loc(num.loc()), num.getValue());
}
/// Dispatch codegen for the right expression subclass using RTTI.
@@ -391,8 +391,8 @@ class MLIRGenImpl {
// with specific shape, we emit a "reshape" operation. It will get
// optimized out later as needed.
if (!vardecl.getType().shape.empty()) {
- value = builder.create<ReshapeOp>(loc(vardecl.loc()),
- getType(vardecl.getType()), value);
+ value = ReshapeOp::create(builder, loc(vardecl.loc()),
+ getType(vardecl.getType()), value);
}
// Register the value in the symbol table.
diff --git a/mlir/examples/toy/Ch3/include/toy/Ops.td b/mlir/examples/toy/Ch3/include/toy/Ops.td
index 485980420a20b..027b076af9e63 100644
--- a/mlir/examples/toy/Ch3/include/toy/Ops.td
+++ b/mlir/examples/toy/Ch3/include/toy/Ops.td
@@ -69,7 +69,7 @@ def ConstantOp : Toy_Op<"constant", [Pure]> {
// Add custom build methods for the constant operation. These method populates
// the `state` that MLIR uses to create operations, i.e. these are used when
- // using `builder.create<ConstantOp>(...)`.
+ // using `ConstantOp::create(builder, ...)`.
let builders = [
// Build a constant with a given constant tensor value.
OpBuilder<(ins "DenseElementsAttr":$value), [{
diff --git a/mlir/examples/toy/Ch3/mlir/MLIRGen.cpp b/mlir/examples/toy/Ch3/mlir/MLIRGen.cpp
index c8cba82cb63a0..0573af699c1f4 100644
--- a/mlir/examples/toy/Ch3/mlir/MLIRGen.cpp
+++ b/mlir/examples/toy/Ch3/mlir/MLIRGen.cpp
@@ -121,8 +121,8 @@ class MLIRGenImpl {
llvm::SmallVector<mlir::Type, 4> argTypes(proto.getArgs().size(),
getType(VarType{}));
auto funcType = builder.getFunctionType(argTypes, /*results=*/{});
- return builder.create<mlir::toy::FuncOp>(location, proto.getName(),
- funcType);
+ return mlir::toy::FuncOp::create(builder, location, proto.getName(),
+ funcType);
}
/// Emit a new function and add it to the MLIR module.
@@ -166,7 +166,7 @@ class MLIRGenImpl {
if (!entryBlock.empty())
returnOp = dyn_cast<ReturnOp>(entryBlock.back());
if (!returnOp) {
- builder.create<ReturnOp>(loc(funcAST.getProto()->loc()));
+ ReturnOp::create(builder, loc(funcAST.getProto()->loc()));
} else if (returnOp.hasOperand()) {
// Otherwise, if this return operation has an operand then add a result to
// the function.
@@ -202,9 +202,9 @@ class MLIRGenImpl {
// support '+' and '*'.
switch (binop.getOp()) {
case '+':
- return builder.create<AddOp>(location, lhs, rhs);
+ return AddOp::create(builder, location, lhs, rhs);
case '*':
- return builder.create<MulOp>(location, lhs, rhs);
+ return MulOp::create(builder, location, lhs, rhs);
}
emitError(location, "invalid binary operator '") << binop.getOp() << "'";
@@ -235,8 +235,8 @@ class MLIRGenImpl {
}
// Otherwise, this return operation has zero operands.
- builder.create<ReturnOp>(location,
- expr ? ArrayRef(expr) : ArrayRef<mlir::Value>());
+ ReturnOp::create(builder, location,
+ expr ? ArrayRef(expr) : ArrayRef<mlir::Value>());
return mlir::success();
}
@@ -280,7 +280,7 @@ class MLIRGenImpl {
// Build the MLIR op `toy.constant`. This invokes the `ConstantOp::build`
// method.
- return builder.create<ConstantOp>(loc(lit.loc()), type, dataAttribute);
+ return ConstantOp::create(builder, loc(lit.loc()), type, dataAttribute);
}
/// Recursive helper function to accumulate the data that compose an array
@@ -325,13 +325,13 @@ class MLIRGenImpl {
"does not accept multiple arguments");
return nullptr;
}
- return builder.create<TransposeOp>(location, operands[0]);
+ return TransposeOp::create(builder, location, operands[0]);
}
// Otherwise this is a call to a user-defined function. Calls to
// user-defined functions are mapped to a custom call that takes the callee
// name as an attribute.
- return builder.create<GenericCallOp>(location, callee, operands);
+ return GenericCallOp::create(builder, location, callee, operands);
}
/// Emit a print expression. It emits specific operations for two builtins:
@@ -341,13 +341,13 @@ class MLIRGenImpl {
if (!arg)
return mlir::failure();
- builder.create<PrintOp>(loc(call.loc()), arg);
+ PrintOp::create(builder, loc(call.loc()), arg);
return mlir::success();
}
/// Emit a constant for a single number (FIXME: semantic? broadcast?)
mlir::Value mlirGen(NumberExprAST &num) {
- return builder.create<ConstantOp>(loc(num.loc()), num.getValue());
+ return ConstantOp::create(builder, loc(num.loc()), num.getValue());
}
/// Dispatch codegen for the right expression subclass using RTTI.
@@ -391,8 +391,8 @@ class MLIRGenImpl {
// with specific shape, we emit a "reshape" operation. It will get
// optimized out later as needed.
if (!vardecl.getType().shape.empty()) {
- value = builder.create<ReshapeOp>(loc(vardecl.loc()),
- getType(vardecl.getType()), value);
+ value = ReshapeOp::create(builder, loc(vardecl.loc()),
+ getType(vardecl.getType()), value);
}
// Register the value in the symbol table.
diff --git a/mlir/examples/toy/Ch4/include/toy/Ops.td b/mlir/examples/toy/Ch4/include/toy/Ops.td
index 0b32b1b0c7726..6c6b73937aaf8 100644
--- a/mlir/examples/toy/Ch4/include/toy/Ops.td
+++ b/mlir/examples/toy/Ch4/include/toy/Ops.td
@@ -72,7 +72,7 @@ def ConstantOp : Toy_Op<"constant", [Pure]> {
// Add custom build methods for the constant operation. These method populates
// the `state` that MLIR uses to create operations, i.e. these are used when
- // using `builder.create<ConstantOp>(...)`.
+ // using `ConstantOp::create(builder, ...)`.
let builders = [
// Build a constant with a given constant tensor value.
OpBuilder<(ins "DenseElementsAttr":$value), [{
diff --git a/mlir/examples/toy/Ch4/mlir/Dialect.cpp b/mlir/examples/toy/Ch4/mlir/Dialect.cpp
index 076a75a26619b..1e5e67296a753 100644
--- a/mlir/examples/toy/Ch4/mlir/Dialect.cpp
+++ b/mlir/examples/toy/Ch4/mlir/Dialect.cpp
@@ -91,7 +91,7 @@ struct ToyInlinerInterface : public DialectInlinerInterface {
Operation *materializeCallConversion(OpBuilder &builder, Value input,
Type resultType,
Location conversionLoc) const final {
- return builder.create<CastOp>(conversionLoc, resultType, input);
+ return CastOp::create(builder, conversionLoc, resultType, input);
}
};
@@ -206,7 +206,8 @@ void ConstantOp::print(mlir::OpAsmPrinter &printer) {
llvm::LogicalResult ConstantOp::verify() {
// If the return type of the constant is not an unranked tensor, the shape
// must match the shape of the attribute holding the data.
- auto resultType = llvm::dyn_cast<mlir::RankedTensorType>(getResult().getType());
+ auto resultType =
+ llvm::dyn_cast<mlir::RankedTensorType>(getResult().getType());
if (!resultType)
return success();
@@ -395,7 +396,8 @@ llvm::LogicalResult ReturnOp::verify() {
auto resultType = results.front();
// Check that the result type of the function matches the operand type.
- if (inputType == resultType || llvm::isa<mlir::UnrankedTensorType>(inputType) ||
+ if (inputType == resultType ||
+ llvm::isa<mlir::UnrankedTensorType>(inputType) ||
llvm::isa<mlir::UnrankedTensorType>(resultType))
return mlir::success();
diff --git a/mlir/examples/toy/Ch4/mlir/MLIRGen.cpp b/mlir/examples/toy/Ch4/mlir/MLIRGen.cpp
index 9371815577b1b..7d676f1b39200 100644
--- a/mlir/examples/toy/Ch4/mlir/MLIRGen.cpp
+++ b/mlir/examples/toy/Ch4/mlir/MLIRGen.cpp
@@ -121,8 +121,8 @@ class MLIRGenImpl {
llvm::SmallVector<mlir::Type, 4> argTypes(proto.getArgs().size(),
getType(VarType{}));
auto funcType = builder.getFunctionType(argTypes, /*results=*/{});
- return builder.create<mlir::toy::FuncOp>(location, proto.getName(),
- funcType);
+ return mlir::toy::FuncOp::create(builder, location, proto.getName(),
+ funcType);
}
/// Emit a new function and add it to the MLIR module.
@@ -166,7 +166,7 @@ class MLIRGenImpl {
if (!entryBlock.em...
[truncated]
``````````
https://github.com/llvm/llvm-project/pull/150652
More information about the Mlir-commits mailing list