[PATCH] D82250: [flang] Shape analysis for result of MATMUL

Peter Klausler via Phabricator via llvm-commits <llvm-commits at lists.llvm.org>
Mon Jun 22 10:13:29 PDT 2020


This revision was automatically updated to reflect the committed changes.
Closed by commit rG16d24e45430c: [flang] Shape analysis for result of MATMUL (authored by klausler).

Repository:
  rG LLVM Github Monorepo

CHANGES SINCE LAST ACTION
  https://reviews.llvm.org/D82250/new/

https://reviews.llvm.org/D82250

Files:
  flang/lib/Evaluate/intrinsics.cpp
  flang/lib/Evaluate/shape.cpp
  flang/lib/Semantics/check-call.cpp


Index: flang/lib/Semantics/check-call.cpp
===================================================================
--- flang/lib/Semantics/check-call.cpp
+++ flang/lib/Semantics/check-call.cpp
@@ -607,7 +607,8 @@
                 // ok
               } else {
                 messages.Say(
-                    "Actual argument is not a variable or typed expression"_err_en_US);
+                    "Actual argument '%s' associated with %s is not a variable or typed expression"_err_en_US,
+                    expr->AsFortran(), dummyName);
               }
             } else {
               const Symbol &assumed{DEREF(arg.GetAssumedTypeDummy())};
Index: flang/lib/Evaluate/shape.cpp
===================================================================
--- flang/lib/Evaluate/shape.cpp
+++ flang/lib/Evaluate/shape.cpp
@@ -545,6 +545,23 @@
       if (!call.arguments().empty()) {
         return (*this)(call.arguments()[0]);
       }
+    } else if (intrinsic->name == "matmul") {
+      if (call.arguments().size() == 2) {
+        if (auto ashape{(*this)(call.arguments()[0])}) {
+          if (auto bshape{(*this)(call.arguments()[1])}) {
+            if (ashape->size() == 1 && bshape->size() == 2) {
+              bshape->erase(bshape->begin());
+              return std::move(*bshape); // matmul(vector, matrix)
+            } else if (ashape->size() == 2 && bshape->size() == 1) {
+              ashape->pop_back();
+              return std::move(*ashape); // matmul(matrix, vector)
+            } else if (ashape->size() == 2 && bshape->size() == 2) {
+              (*ashape)[1] = std::move((*bshape)[1]);
+              return std::move(*ashape); // matmul(matrix, matrix)
+            }
+          }
+        }
+      }
     } else if (intrinsic->name == "reshape") {
       if (call.arguments().size() >= 2 && call.arguments().at(1)) {
         // SHAPE(RESHAPE(array,shape)) -> shape
Index: flang/lib/Evaluate/intrinsics.cpp
===================================================================
--- flang/lib/Evaluate/intrinsics.cpp
+++ flang/lib/Evaluate/intrinsics.cpp
@@ -496,28 +496,28 @@
     {"logical", {{"l", AnyLogical}, DefaultingKIND}, KINDLogical},
     {"log_gamma", {{"x", SameReal}}, SameReal},
     {"matmul",
-        {{"array_a", AnyLogical, Rank::vector},
-            {"array_b", AnyLogical, Rank::matrix}},
+        {{"matrix_a", AnyLogical, Rank::vector},
+            {"matrix_b", AnyLogical, Rank::matrix}},
         ResultLogical, Rank::vector, IntrinsicClass::transformationalFunction},
     {"matmul",
-        {{"array_a", AnyLogical, Rank::matrix},
-            {"array_b", AnyLogical, Rank::vector}},
+        {{"matrix_a", AnyLogical, Rank::matrix},
+            {"matrix_b", AnyLogical, Rank::vector}},
         ResultLogical, Rank::vector, IntrinsicClass::transformationalFunction},
     {"matmul",
-        {{"array_a", AnyLogical, Rank::matrix},
-            {"array_b", AnyLogical, Rank::matrix}},
+        {{"matrix_a", AnyLogical, Rank::matrix},
+            {"matrix_b", AnyLogical, Rank::matrix}},
         ResultLogical, Rank::matrix, IntrinsicClass::transformationalFunction},
     {"matmul",
-        {{"array_a", AnyNumeric, Rank::vector},
-            {"array_b", AnyNumeric, Rank::matrix}},
+        {{"matrix_a", AnyNumeric, Rank::vector},
+            {"matrix_b", AnyNumeric, Rank::matrix}},
         ResultNumeric, Rank::vector, IntrinsicClass::transformationalFunction},
     {"matmul",
-        {{"array_a", AnyNumeric, Rank::matrix},
-            {"array_b", AnyNumeric, Rank::vector}},
+        {{"matrix_a", AnyNumeric, Rank::matrix},
+            {"matrix_b", AnyNumeric, Rank::vector}},
         ResultNumeric, Rank::vector, IntrinsicClass::transformationalFunction},
     {"matmul",
-        {{"array_a", AnyNumeric, Rank::matrix},
-            {"array_b", AnyNumeric, Rank::matrix}},
+        {{"matrix_a", AnyNumeric, Rank::matrix},
+            {"matrix_b", AnyNumeric, Rank::matrix}},
         ResultNumeric, Rank::matrix, IntrinsicClass::transformationalFunction},
     {"maskl", {{"i", AnyInt}, DefaultingKIND}, KINDInt},
     {"maskr", {{"i", AnyInt}, DefaultingKIND}, KINDInt},
@@ -1904,7 +1904,6 @@
   }
 
   if (call.isSubroutineCall) {
-    parser::Messages buffer;
     auto subrRange{subroutines_.equal_range(call.name)};
     for (auto iter{subrRange.first}; iter != subrRange.second; ++iter) {
       if (auto specificCall{
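
For reference, the result-shape rules that the shape.cpp hunk encodes (F2018 16.9.124, MATMUL) reduce to a small case analysis on the operand ranks. The sketch below restates them over plain extent vectors; the names `MatmulResultShape` and `Extents` are illustrative only and are not flang's API, which works on `Shape`/`ExtentExpr` values as shown in the hunk above.

  // Standalone sketch (illustrative, not flang code): MATMUL result-shape
  // rules applied to plain extent vectors.
  #include <cstdint>
  #include <optional>
  #include <vector>

  using Extents = std::vector<std::int64_t>;

  std::optional<Extents> MatmulResultShape(const Extents &a, const Extents &b) {
    if (a.size() == 1 && b.size() == 2) {
      return Extents{b[1]}; // matmul(vector(n), matrix(n,m)) -> vector(m)
    } else if (a.size() == 2 && b.size() == 1) {
      return Extents{a[0]}; // matmul(matrix(n,m), vector(m)) -> vector(n)
    } else if (a.size() == 2 && b.size() == 2) {
      return Extents{a[0], b[1]}; // matmul(matrix(n,k), matrix(k,m)) -> matrix(n,m)
    }
    return std::nullopt; // any other rank combination is invalid for MATMUL
  }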

