[clang] [llvm] Fix report script (PR #116017)

David Spickett via cfe-commits cfe-commits at lists.llvm.org
Wed Nov 13 02:15:49 PST 2024


https://github.com/DavidSpickett created https://github.com/llvm/llvm-project/pull/116017


>From 69a3030e61ab5d4483464cd912d78a4e5d24d644 Mon Sep 17 00:00:00 2001
From: David Spickett <david.spickett at linaro.org>
Date: Wed, 13 Nov 2024 09:19:10 +0000
Subject: [PATCH 1/2] Reland "[ci] New script to generate test reports as
 Buildkite Annotations (#113447)"

This reverts commit 8a1ca6cad9cd0e972c322910cdfbbe9552c6c7ca.

I have fixed 2 things:
* The report is now sent via stdin (sketched just below), so we do not hit
  the limit on the size of command-line arguments.
* The report is limited to 1MB in size. If we exceed that, we fall back to
  listing only the totals, with a note telling you to check the full log.
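
As a minimal sketch of the stdin hand-off (illustrative; see
generate_test_report.py in the diff for the real code):

    import subprocess

    report = "# Example Test Results\n* 1 test failed"

    # Piping the report over stdin avoids the OS limit on the total size
    # of command-line arguments, which a large report could exceed.
    p = subprocess.Popen(
        ["buildkite-agent", "annotate", "--style", "error"],
        stdin=subprocess.PIPE,
        universal_newlines=True,
    )
    p.communicate(input=report)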
---
 .ci/generate_test_report.py | 424 ++++++++++++++++++++++++++++++++++++
 .ci/monolithic-linux.sh     |  10 +-
 .ci/monolithic-windows.sh   |  10 +-
 .ci/requirements.txt        |   1 +
 4 files changed, 441 insertions(+), 4 deletions(-)
 create mode 100644 .ci/generate_test_report.py
 create mode 100644 .ci/requirements.txt

diff --git a/.ci/generate_test_report.py b/.ci/generate_test_report.py
new file mode 100644
index 00000000000000..b2ab81ae4e015e
--- /dev/null
+++ b/.ci/generate_test_report.py
@@ -0,0 +1,424 @@
+# Script to parse many JUnit XML result files and send a report to the Buildkite
+# agent as an annotation.
+#
+# To run the unit tests:
+# python3 -m unittest discover -p generate_test_report.py
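+# (-p sets the discovery file pattern, so only this file's tests are collected.)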
+
+import argparse
+import subprocess
+import unittest
+from io import StringIO
+from junitparser import JUnitXml, Failure
+from textwrap import dedent
+
+
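+# Test helper: build a JUnitXml object from an in-memory XML string.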
+def junit_from_xml(xml):
+    return JUnitXml.fromfile(StringIO(xml))
+
+
+class TestReports(unittest.TestCase):
+    def test_title_only(self):
+        self.assertEqual(_generate_report("Foo", []), ("", None))
+
+    def test_no_tests_in_testsuite(self):
+        self.assertEqual(
+            _generate_report(
+                "Foo",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="0.00">
+          <testsuite name="Empty" tests="0" failures="0" skipped="0" time="0.00">
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+            ),
+            ("", None),
+        )
+
+    def test_no_failures(self):
+        self.assertEqual(
+            _generate_report(
+                "Foo",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="0.00">
+          <testsuite name="Passed" tests="1" failures="0" skipped="0" time="0.00">
+          <testcase classname="Bar/test_1" name="test_1" time="0.00"/>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+            ),
+            (
+                dedent(
+                    """\
+              # Foo
+
+              * 1 test passed"""
+                ),
+                "success",
+            ),
+        )
+
+    def test_report_single_file_single_testsuite(self):
+        self.assertEqual(
+            _generate_report(
+                "Foo",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="8.89">
+          <testsuite name="Bar" tests="4" failures="2" skipped="1" time="410.63">
+          <testcase classname="Bar/test_1" name="test_1" time="0.02"/>
+          <testcase classname="Bar/test_2" name="test_2" time="0.02">
+            <skipped message="Reason"/>
+          </testcase>
+          <testcase classname="Bar/test_3" name="test_3" time="0.02">
+            <failure><![CDATA[Output goes here]]></failure>
+          </testcase>
+          <testcase classname="Bar/test_4" name="test_4" time="0.02">
+            <failure><![CDATA[Other output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+            ),
+            (
+                dedent(
+                    """\
+          # Foo
+
+          * 1 test passed
+          * 1 test skipped
+          * 2 tests failed
+
+          ## Failed Tests
+          (click to see output)
+
+          ### Bar
+          <details>
+          <summary>Bar/test_3/test_3</summary>
+
+          ```
+          Output goes here
+          ```
+          </details>
+          <details>
+          <summary>Bar/test_4/test_4</summary>
+
+          ```
+          Other output goes here
+          ```
+          </details>"""
+                ),
+                "error",
+            ),
+        )
+
+    MULTI_SUITE_OUTPUT = (
+        dedent(
+            """\
+        # ABC and DEF
+
+        * 1 test passed
+        * 1 test skipped
+        * 2 tests failed
+
+        ## Failed Tests
+        (click to see output)
+
+        ### ABC
+        <details>
+        <summary>ABC/test_2/test_2</summary>
+
+        ```
+        ABC/test_2 output goes here
+        ```
+        </details>
+
+        ### DEF
+        <details>
+        <summary>DEF/test_2/test_2</summary>
+
+        ```
+        DEF/test_2 output goes here
+        ```
+        </details>"""
+        ),
+        "error",
+    )
+
+    def test_report_single_file_multiple_testsuites(self):
+        self.assertEqual(
+            _generate_report(
+                "ABC and DEF",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="8.89">
+          <testsuite name="ABC" tests="2" failures="1" skipped="0" time="410.63">
+          <testcase classname="ABC/test_1" name="test_1" time="0.02"/>
+          <testcase classname="ABC/test_2" name="test_2" time="0.02">
+            <failure><![CDATA[ABC/test_2 output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          <testsuite name="DEF" tests="2" failures="1" skipped="1" time="410.63">
+          <testcase classname="DEF/test_1" name="test_1" time="0.02">
+            <skipped message="reason"/>
+          </testcase>
+          <testcase classname="DEF/test_2" name="test_2" time="0.02">
+            <failure><![CDATA[DEF/test_2 output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+            ),
+            self.MULTI_SUITE_OUTPUT,
+        )
+
+    def test_report_multiple_files_multiple_testsuites(self):
+        self.assertEqual(
+            _generate_report(
+                "ABC and DEF",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="8.89">
+          <testsuite name="ABC" tests="2" failures="1" skipped="0" time="410.63">
+          <testcase classname="ABC/test_1" name="test_1" time="0.02"/>
+          <testcase classname="ABC/test_2" name="test_2" time="0.02">
+            <failure><![CDATA[ABC/test_2 output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    ),
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="8.89">
+          <testsuite name="DEF" tests="2" failures="1" skipped="1" time="410.63">
+          <testcase classname="DEF/test_1" name="test_1" time="0.02">
+            <skipped message="reason"/>
+          </testcase>
+          <testcase classname="DEF/test_2" name="test_2" time="0.02">
+            <failure><![CDATA[DEF/test_2 output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    ),
+                ],
+            ),
+            self.MULTI_SUITE_OUTPUT,
+        )
+
+    def test_report_dont_list_failures(self):
+        self.assertEqual(
+            _generate_report(
+                "Foo",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="0.02">
+          <testsuite name="Bar" tests="1" failures="1" skipped="0" time="0.02">
+          <testcase classname="Bar/test_1" name="test_1" time="0.02">
+            <failure><![CDATA[Output goes here]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+                list_failures=False,
+            ),
+            (
+                dedent(
+                    """\
+          # Foo
+
+          * 1 test failed
+
+          Failed tests and their output were too large to report. Download the build's log file to see the details."""
+                ),
+                "error",
+            ),
+        )
+
+    def test_report_size_limit(self):
+        self.assertEqual(
+            _generate_report(
+                "Foo",
+                [
+                    junit_from_xml(
+                        dedent(
+                            """\
+          <?xml version="1.0" encoding="UTF-8"?>
+          <testsuites time="0.02">
+          <testsuite name="Bar" tests="1" failures="1" skipped="0" time="0.02">
+          <testcase classname="Bar/test_1" name="test_1" time="0.02">
+            <failure><![CDATA[Some long output goes here...]]></failure>
+          </testcase>
+          </testsuite>
+          </testsuites>"""
+                        )
+                    )
+                ],
+                size_limit=128,
+            ),
+            (
+                dedent(
+                    """\
+          # Foo
+
+          * 1 test failed
+
+          Failed tests and their output were too large to report. Download the build's log file to see the details."""
+                ),
+                "error",
+            ),
+        )
+
+
+# Set size_limit to limit the byte size of the report. The default is 1MB as this
+# is the most that can be put into an annotation. If the generated report exceeds
+# this limit and failures are listed, it will be generated again with the failures
+# omitted. This minimal report will always fit into an annotation.
+# If list_failures is False, the totals will still be reported, but the names and
+# output of the failed tests will not be.
+def _generate_report(title, junit_objects, size_limit=1024 * 1024, list_failures=True):
+    style = None
+
+    if not junit_objects:
+        return ("", style)
+
+    failures = {}
+    tests_run = 0
+    tests_skipped = 0
+    tests_failed = 0
+
+    for results in junit_objects:
+        for testsuite in results:
+            tests_run += testsuite.tests
+            tests_skipped += testsuite.skipped
+            tests_failed += testsuite.failures
+
+            for test in testsuite:
+                if (
+                    not test.is_passed
+                    and test.result
+                    and isinstance(test.result[0], Failure)
+                ):
+                    if failures.get(testsuite.name) is None:
+                        failures[testsuite.name] = []
+                    failures[testsuite.name].append(
+                        (test.classname + "/" + test.name, test.result[0].text)
+                    )
+
+    if not tests_run:
+        return ("", style)
+
+    style = "error" if tests_failed else "success"
+    report = [f"# {title}", ""]
+
+    tests_passed = tests_run - tests_skipped - tests_failed
+
+    def plural(num_tests):
+        return "test" if num_tests == 1 else "tests"
+
+    if tests_passed:
+        report.append(f"* {tests_passed} {plural(tests_passed)} passed")
+    if tests_skipped:
+        report.append(f"* {tests_skipped} {plural(tests_skipped)} skipped")
+    if tests_failed:
+        report.append(f"* {tests_failed} {plural(tests_failed)} failed")
+
+    if not list_failures:
+        report.extend(
+            [
+                "",
+                "Failed tests and their output was too large to report. "
+                "Download the build's log file to see the details.",
+            ]
+        )
+    elif failures:
+        report.extend(["", "## Failed Tests", "(click to see output)"])
+
+        for testsuite_name, testsuite_failures in failures.items():
+            report.extend(["", f"### {testsuite_name}"])
+            for name, output in testsuite_failures:
+                report.extend(
+                    [
+                        "<details>",
+                        f"<summary>{name}</summary>",
+                        "",
+                        "```",
+                        output,
+                        "```",
+                        "</details>",
+                    ]
+                )
+
+    report = "\n".join(report)
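+    # If the rendered report exceeds the limit, regenerate it without the
+    # failure details; that minimal form always fits in an annotation.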
+    if len(report.encode("utf-8")) > size_limit:
+        return _generate_report(title, junit_objects, size_limit, list_failures=False)
+
+    return report, style
+
+
+def generate_report(title, junit_files):
+    return _generate_report(title, [JUnitXml.fromfile(p) for p in junit_files])
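+# For illustration (hypothetical file names): generate_report("Foo", ["a.xml"])
+# returns a (report, style) tuple, where style is None, "success" or "error".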
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "title", help="Title of the test report, without Markdown formatting."
+    )
+    parser.add_argument("context", help="Annotation context to write to.")
+    parser.add_argument("junit_files", help="Paths to JUnit report files.", nargs="*")
+    args = parser.parse_args()
+
+    report, style = generate_report(args.title, args.junit_files)
+
+    # An empty report means no test results were found (style will be None),
+    # for example because the build failed before any tests could run. There
+    # is nothing to annotate in that case.
+    if not report:
+        raise SystemExit(0)
+
+    p = subprocess.Popen(
+        [
+            "buildkite-agent",
+            "annotate",
+            "--context",
+            args.context,
+            "--style",
+            style,
+        ],
+        stdin=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        universal_newlines=True,
+    )
+
+    # The report can be larger than the OS limit on the total size of command
+    # line arguments, so we send it over stdin instead.
+    _, err = p.communicate(input=report)
+    if p.returncode:
+        raise RuntimeError(f"Failed to send report to buildkite-agent:\n{err}")
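+
+# Example invocation (illustrative paths; the real calls are in the
+# .ci/monolithic-*.sh scripts):
+#   python3 generate_test_report.py ":linux: Linux x64 Test Results" \
+#     "linux-x64-test-results" build/test-results.*.xml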
diff --git a/.ci/monolithic-linux.sh b/.ci/monolithic-linux.sh
index 17ea51c08fafd3..a4aeea7a16addc 100755
--- a/.ci/monolithic-linux.sh
+++ b/.ci/monolithic-linux.sh
@@ -28,11 +28,16 @@ if [[ -n "${CLEAR_CACHE:-}" ]]; then
   ccache --clear
 fi
 
-function show-stats {
+function at-exit {
   mkdir -p artifacts
   ccache --print-stats > artifacts/ccache_stats.txt
+
+  # If building fails there will be no results files.
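+  # With nullglob, a non-matching pattern expands to zero arguments instead
+  # of being passed through literally.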
+  shopt -s nullglob
+  python3 "${MONOREPO_ROOT}"/.ci/generate_test_report.py ":linux: Linux x64 Test Results" \
+    "linux-x64-test-results" "${BUILD_DIR}"/test-results.*.xml
 }
-trap show-stats EXIT
+trap at-exit EXIT
 
 projects="${1}"
 targets="${2}"
@@ -42,6 +47,7 @@ lit_args="-v --xunit-xml-output ${BUILD_DIR}/test-results.xml --use-unique-outpu
 echo "--- cmake"
 pip install -q -r "${MONOREPO_ROOT}"/mlir/python/requirements.txt
 pip install -q -r "${MONOREPO_ROOT}"/lldb/test/requirements.txt
+pip install -q -r "${MONOREPO_ROOT}"/.ci/requirements.txt
 cmake -S "${MONOREPO_ROOT}"/llvm -B "${BUILD_DIR}" \
       -D LLVM_ENABLE_PROJECTS="${projects}" \
       -G Ninja \
diff --git a/.ci/monolithic-windows.sh b/.ci/monolithic-windows.sh
index 9ec44c22442d06..4ead122212f4f0 100755
--- a/.ci/monolithic-windows.sh
+++ b/.ci/monolithic-windows.sh
@@ -27,17 +27,23 @@ if [[ -n "${CLEAR_CACHE:-}" ]]; then
 fi
 
 sccache --zero-stats
-function show-stats {
+function at-exit {
   mkdir -p artifacts
   sccache --show-stats >> artifacts/sccache_stats.txt
+
+  # If building fails there will be no results files.
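+  # With nullglob, a non-matching pattern expands to zero arguments instead
+  # of being passed through literally.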
+  shopt -s nullglob
+  python "${MONOREPO_ROOT}"/.ci/generate_test_report.py ":windows: Windows x64 Test Results" \
+    "windows-x64-test-results" "${BUILD_DIR}"/test-results.*.xml
 }
-trap show-stats EXIT
+trap at-exit EXIT
 
 projects="${1}"
 targets="${2}"
 
 echo "--- cmake"
 pip install -q -r "${MONOREPO_ROOT}"/mlir/python/requirements.txt
+pip install -q -r "${MONOREPO_ROOT}"/.ci/requirements.txt
 
 # The CMAKE_*_LINKER_FLAGS to disable the manifest come from research
 # on fixing a build reliability issue on the build server, please
diff --git a/.ci/requirements.txt b/.ci/requirements.txt
new file mode 100644
index 00000000000000..ad63858c9fdc2c
--- /dev/null
+++ b/.ci/requirements.txt
@@ -0,0 +1 @@
+junitparser==3.2.0

>From 83a4debfbd8f5e206ca44e0fb6a289838a33399e Mon Sep 17 00:00:00 2001
From: David Spickett <david.spickett at linaro.org>
Date: Wed, 13 Nov 2024 10:15:22 +0000
Subject: [PATCH 2/2] test a failure

---
 .../exploded-graph-rewriter/win_path_forbidden_chars.cpp        | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/clang/test/Analysis/exploded-graph-rewriter/win_path_forbidden_chars.cpp b/clang/test/Analysis/exploded-graph-rewriter/win_path_forbidden_chars.cpp
index 5b955a0aae3ec1..7065a8127b9fa1 100644
--- a/clang/test/Analysis/exploded-graph-rewriter/win_path_forbidden_chars.cpp
+++ b/clang/test/Analysis/exploded-graph-rewriter/win_path_forbidden_chars.cpp
@@ -15,5 +15,5 @@ void test() {
 }
 
 // This test passes if exploded_graph_rewriter handles the dot file without errors.
-// CHECK: DEBUG:root:Line: digraph "Exploded Graph"
+// CHECK: DEBUG:root:Line: digraph "Exploded Graph" -- fail on windows!
 // CHECK: \"file\": \"scratch space\"


