[Mlir-commits] [mlir] 2049b2a - [MLIR] Fix compiler warnings (NFC)
Lorenzo Chelini
llvmlistbot at llvm.org
Wed Jul 5 00:50:02 PDT 2023
Author: Lorenzo Chelini
Date: 2023-07-05T09:49:57+02:00
New Revision: 2049b2adfe573853c48e797927485c4c6ea97203
URL: https://github.com/llvm/llvm-project/commit/2049b2adfe573853c48e797927485c4c6ea97203
DIFF: https://github.com/llvm/llvm-project/commit/2049b2adfe573853c48e797927485c4c6ea97203.diff
LOG: [MLIR] Fix compiler warnings (NFC)
In `TestTensorTransforms.cpp`, `replaced` is nullptr at the point of the `emitError` call; I assumed the intent
was to emit the error on the `rootOp`.
In `TransformInterfaces.cpp`, there were some uninitialized variables.
In `NVGPUTransformOps.cpp`, `matmulOp` was never used.
Reviewed By: ftynse
Differential Revision: https://reviews.llvm.org/D154439
Added:
Modified:
mlir/lib/Dialect/Linalg/Transforms/Padding.cpp
mlir/lib/Dialect/NVGPU/TransformOps/NVGPUTransformOps.cpp
mlir/lib/Dialect/Transform/IR/TransformInterfaces.cpp
mlir/test/lib/Dialect/Tensor/TestTensorTransforms.cpp
Removed:
################################################################################
diff --git a/mlir/lib/Dialect/Linalg/Transforms/Padding.cpp b/mlir/lib/Dialect/Linalg/Transforms/Padding.cpp
index 9b550406e8a764..98e9c81e4a3974 100644
--- a/mlir/lib/Dialect/Linalg/Transforms/Padding.cpp
+++ b/mlir/lib/Dialect/Linalg/Transforms/Padding.cpp
@@ -99,9 +99,10 @@ static LogicalResult computePaddedShape(linalg::LinalgOp opToPad,
static FailureOr<Value> padOperandToSmallestStaticBoundingBox(
RewriterBase &rewriter, linalg::LinalgOp opToPad, OpOperand *opOperand,
const LinalgPaddingOptions &options) {
- assert(!options.padToMultipleOf.has_value() ||
- options.padToMultipleOf->size() == options.paddingDimensions.size() &&
- "invalid number of elements in padToMultipleOf");
+ assert(
+ (!options.padToMultipleOf.has_value() ||
+ options.padToMultipleOf->size() == options.paddingDimensions.size()) &&
+ "invalid number of elements in padToMultipleOf");
// Compute padded shape.
SmallVector<int64_t> paddedShape;
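For context on the `Padding.cpp` hunk: `&&` binds tighter than `||`, so the unparenthesized condition grouped as `!options.padToMultipleOf.has_value() || (size-check && "...")`. The truth value is unchanged (the string literal is always non-null), but Clang flags the ambiguous grouping with -Wlogical-op-parentheses (part of -Wparentheses). A minimal standalone sketch of the same shape, with illustrative names that are not from the patch:

#include <cassert>

// Illustrative only: the unparenthesized form groups as
// `!hasMultiple || (sizesMatch && "msg")`. The string literal is non-null,
// so behaviour is identical, but Clang warns about `&&` within `||`.
void checkPaddingOptions(bool hasMultiple, bool sizesMatch) {
  // Warns: assert(!hasMultiple || sizesMatch && "invalid number of elements");
  assert((!hasMultiple || sizesMatch) &&
         "invalid number of elements in padToMultipleOf");
}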
diff --git a/mlir/lib/Dialect/NVGPU/TransformOps/NVGPUTransformOps.cpp b/mlir/lib/Dialect/NVGPU/TransformOps/NVGPUTransformOps.cpp
index b08b105d91e199..3bc64f742c3a6f 100644
--- a/mlir/lib/Dialect/NVGPU/TransformOps/NVGPUTransformOps.cpp
+++ b/mlir/lib/Dialect/NVGPU/TransformOps/NVGPUTransformOps.cpp
@@ -438,7 +438,7 @@ DiagnosedSilenceableFailure transform::RewriteMatmulAsMmaSyncOp::applyToOne(
transform::TransformState &state) {
bool fail = true;
// TODO: more robust detection of matmulOp, with transposes etc.
- if (auto matmulOp = isa<linalg::MatmulOp>(linalgOp.getOperation())) {
+ if (isa_and_nonnull<linalg::MatmulOp>(linalgOp.getOperation())) {
Location loc = linalgOp.getLoc();
// TODO: more robust computation of laneId, for now assume a single warp.
Value laneId = rewriter.create<gpu::ThreadIdOp>(
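For reference on the `NVGPUTransformOps.cpp` hunk: `llvm::isa_and_nonnull<T>` (from llvm/Support/Casting.h) folds a null check into the type test, so the condition no longer needs to bind a `matmulOp` name that the body never reads. A rough sketch against a hypothetical LLVM-RTTI hierarchy (these stand-in types are not the real linalg ops):

#include "llvm/Support/Casting.h"

// Hypothetical two-op hierarchy wired for LLVM-style RTTI, purely to show the
// call shape; it is not the MLIR linalg class hierarchy.
struct Op {
  enum Kind { K_Generic, K_Matmul };
  Kind kind;
  explicit Op(Kind k) : kind(k) {}
};
struct MatmulOp : Op {
  MatmulOp() : Op(K_Matmul) {}
  static bool classof(const Op *op) { return op->kind == K_Matmul; }
};

bool isMatmul(const Op *op) {
  // Before: `if (auto matmulOp = isa<MatmulOp>(op))` bound a bool the body
  // never read; isa_and_nonnull also returns false when `op` is null.
  return llvm::isa_and_nonnull<MatmulOp>(op);
}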
diff --git a/mlir/lib/Dialect/Transform/IR/TransformInterfaces.cpp b/mlir/lib/Dialect/Transform/IR/TransformInterfaces.cpp
index 961cf34f0ee9aa..889be71c1f34cb 100644
--- a/mlir/lib/Dialect/Transform/IR/TransformInterfaces.cpp
+++ b/mlir/lib/Dialect/Transform/IR/TransformInterfaces.cpp
@@ -521,7 +521,9 @@ void transform::TransformState::recordValueHandleInvalidationByOpHandleOne(
for (Operation *ancestor : potentialAncestors) {
Operation *definingOp;
std::optional<unsigned> resultNo;
- unsigned argumentNo, blockNo, regionNo;
+ unsigned argumentNo = std::numeric_limits<unsigned>::max();
+ unsigned blockNo = std::numeric_limits<unsigned>::max();
+ unsigned regionNo = std::numeric_limits<unsigned>::max();
if (auto opResult = llvm::dyn_cast<OpResult>(payloadValue)) {
definingOp = opResult.getOwner();
resultNo = opResult.getResultNumber();
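On the `TransformInterfaces.cpp` hunk: each of these counters is assigned on only one of the branches that follow, which is what provokes maybe-uninitialized diagnostics; seeding them with an impossible sentinel makes every path defined without affecting the branch that really sets them. A minimal sketch of the pattern, with illustrative names:

#include <limits>
#include <optional>

// Illustrative only: `argumentNo` receives a meaningful value on just one
// path, and compilers cannot always prove the later read is guarded, so they
// may warn (-Wmaybe-uninitialized / -Wsometimes-uninitialized). The sentinel
// keeps the value defined everywhere.
unsigned selectIndex(std::optional<unsigned> resultNo, unsigned blockArgNo) {
  unsigned argumentNo = std::numeric_limits<unsigned>::max();
  if (!resultNo)
    argumentNo = blockArgNo; // the only path that assigns a real value
  return resultNo ? *resultNo : argumentNo;
}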
diff --git a/mlir/test/lib/Dialect/Tensor/TestTensorTransforms.cpp b/mlir/test/lib/Dialect/Tensor/TestTensorTransforms.cpp
index baa19a980146eb..3c510c18996b0c 100644
--- a/mlir/test/lib/Dialect/Tensor/TestTensorTransforms.cpp
+++ b/mlir/test/lib/Dialect/Tensor/TestTensorTransforms.cpp
@@ -317,7 +317,7 @@ static LogicalResult testTrackingListenerReplacements(Operation *rootOp) {
if (status.wasInterrupted())
return failure();
if (!replaced) {
- replaced->emitError("could not find 'replaced' op");
+ rootOp->emitError("could not find 'replaced' op");
return failure();
}