[Mlir-commits] [mlir] [mlir][tosa] Add ERROR_IF checks to TRANSPOSE_CONV2D verifier (PR #133234)

Elen Kalda llvmlistbot at llvm.org
Wed Apr 2 02:27:13 PDT 2025


================
@@ -2896,6 +2896,114 @@ LogicalResult TransposeConv2DOp::inferReturnTypeComponents(
 LogicalResult TransposeConv2DOp::verify() {
   if (verifyConvOp(*this).failed() || verifyConvOpModes(*this).failed())
     return failure();
+
+  llvm::ArrayRef<int64_t> strides = getStride();
+  const int64_t strideY = strides[0];
+  const int64_t strideX = strides[1];
+
+  if (strideY < 1 || strideX < 1)
+    return emitOpError("expect all stride values to be >= 1, got [")
+           << strides << "]";
+
+  const RankedTensorType inputType =
+      llvm::dyn_cast<RankedTensorType>(getInput().getType());
+
+  const RankedTensorType outputType =
+      llvm::dyn_cast<RankedTensorType>(getOutput().getType());
+
+  const RankedTensorType weightType =
+      llvm::dyn_cast<RankedTensorType>(getWeight().getType());
+
+  llvm::ArrayRef<int64_t> padding = getOutPad();
+
+  const auto checkPadAgainstKernelDim =
+      [this](const int64_t pad_value, const int64_t kernel_dim_size,
+             const llvm::StringRef pad_name,
+             llvm::StringRef kernel_dim_name) -> LogicalResult {
+    if (pad_value <= -kernel_dim_size)
+      return emitOpError("expected ")
+             << pad_name << " > -" << kernel_dim_name
+             << ", but got: " << pad_name << "=" << pad_value << " and "
+             << kernel_dim_name << "=" << kernel_dim_size;
+    return success();
+  };
+
+  if (outputType) {
+    const int64_t kernelHeight = weightType.getDimSize(1);
+    const int64_t outPadTop = padding[0];
+    const int64_t outPadBottom = padding[1];
+
+    if (!ShapedType::isDynamic(kernelHeight)) {
+      if (failed(checkPadAgainstKernelDim(outPadTop, kernelHeight,
+                                          "out_pad_top", "KH")))
+        return failure();
+      if (failed(checkPadAgainstKernelDim(outPadBottom, kernelHeight,
+                                          "out_pad_bottom", "KH")))
+        return failure();
+
+      const int64_t inputHeight = inputType.getDimSize(1);
+      const int64_t outputHeight = outputType.getDimSize(1);
+
+      if (!ShapedType::isDynamic(inputHeight) &&
+          !ShapedType::isDynamic(outputHeight)) {
+        if (outputHeight != (inputHeight - 1) * strideY + outPadTop +
+                                outPadBottom + kernelHeight)
+          return emitOpError(
+                     "dimension mismatch: expected OH == (IH - 1) * stride_y "
+                     "+ out_pad_top + out_pad_bottom + KH, but got ")
+                 << outputHeight << " != (" << inputHeight << " - 1) * "
+                 << strideY << " + " << outPadTop << " + " << outPadBottom
+                 << " + " << kernelHeight;
+      }
+    }
+
+    const int64_t kernelWidth = weightType.getDimSize(2);
+    const int64_t outPadLeft = padding[2];
+    const int64_t outPadRight = padding[3];
+
+    if (!ShapedType::isDynamic(kernelWidth)) {
+      if (failed(checkPadAgainstKernelDim(outPadLeft, kernelWidth,
+                                          "out_pad_left", "KW")))
+        return failure();
+      if (failed(checkPadAgainstKernelDim(outPadRight, kernelWidth,
----------------
ekalda wrote:

Sure :D

https://github.com/llvm/llvm-project/pull/133234
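
For reference, a minimal standalone sketch of the arithmetic behind the ERROR_IF checks quoted above (the helper names and the concrete numbers are illustrative only, not part of the patch):

  // Sketch of the TRANSPOSE_CONV2D ERROR_IF arithmetic, assuming the formulas
  // shown in the diff above. Helper names are hypothetical.
  #include <cstdint>
  #include <cstdio>

  // ERROR_IF(out_pad <= -kernel_dim): the pad must leave at least one
  // kernel element overlapping the input along that axis.
  static bool padTooNegative(int64_t pad, int64_t kernelDim) {
    return pad <= -kernelDim;
  }

  // Expected output extent for one spatial axis:
  //   OH = (IH - 1) * stride + out_pad_top + out_pad_bottom + KH
  static int64_t expectedOutputDim(int64_t inputDim, int64_t stride,
                                   int64_t padBefore, int64_t padAfter,
                                   int64_t kernelDim) {
    return (inputDim - 1) * stride + padBefore + padAfter + kernelDim;
  }

  int main() {
    // Example: IH = 8, stride_y = 2, out_pad_top = 0, out_pad_bottom = 1,
    // KH = 3 => OH must be (8 - 1) * 2 + 0 + 1 + 3 = 18; any other static OH
    // would trigger the "dimension mismatch" diagnostic in the verifier.
    std::printf("expected OH = %lld\n",
                (long long)expectedOutputDim(8, 2, 0, 1, 3));

    // Example: out_pad_top = -3 with KH = 3 trips the pad check,
    // since -3 <= -KH.
    std::printf("pad check fails: %s\n",
                padTooNegative(-3, 3) ? "yes" : "no");
    return 0;
  }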

