[llvm] r332056 - [LIT] Move xunit tests into their own location, and add failures

Chris Matthews via llvm-commits llvm-commits at lists.llvm.org
Thu May 10 15:51:28 PDT 2018


Author: cmatthews
Date: Thu May 10 15:51:28 2018
New Revision: 332056

URL: http://llvm.org/viewvc/llvm-project?rev=332056&view=rev
Log:
[LIT] Move xunit tests into their own location, and add failures

Failures will increase coverage.

Added:
    llvm/trunk/utils/lit/tests/Inputs/xunit-output/
    llvm/trunk/utils/lit/tests/Inputs/xunit-output/bad&name.ini
      - copied, changed from r332055, llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini
    llvm/trunk/utils/lit/tests/Inputs/xunit-output/dummy_format.py
    llvm/trunk/utils/lit/tests/Inputs/xunit-output/lit.cfg
Removed:
    llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini
Modified:
    llvm/trunk/utils/lit/tests/test-output.py
    llvm/trunk/utils/lit/tests/xunit-output.py

Removed: llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/Inputs/test-data/bad%26name.ini?rev=332055&view=auto
==============================================================================
--- llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini (original)
+++ llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini (removed)
@@ -1,7 +0,0 @@
-[global]
-result_code = PASS
-result_output = & < > "
-
-[results]
-value0 = 1
-value1 = 2.3456
\ No newline at end of file

Copied: llvm/trunk/utils/lit/tests/Inputs/xunit-output/bad&name.ini (from r332055, llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini)
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/Inputs/xunit-output/bad%26name.ini?p2=llvm/trunk/utils/lit/tests/Inputs/xunit-output/bad%26name.ini&p1=llvm/trunk/utils/lit/tests/Inputs/test-data/bad%26name.ini&r1=332055&r2=332056&rev=332056&view=diff
==============================================================================
--- llvm/trunk/utils/lit/tests/Inputs/test-data/bad&name.ini (original)
+++ llvm/trunk/utils/lit/tests/Inputs/xunit-output/bad&name.ini Thu May 10 15:51:28 2018
@@ -1,5 +1,5 @@
 [global]
-result_code = PASS
+result_code = FAIL
 result_output = & < > "
 
 [results]

Added: llvm/trunk/utils/lit/tests/Inputs/xunit-output/dummy_format.py
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/Inputs/xunit-output/dummy_format.py?rev=332056&view=auto
==============================================================================
--- llvm/trunk/utils/lit/tests/Inputs/xunit-output/dummy_format.py (added)
+++ llvm/trunk/utils/lit/tests/Inputs/xunit-output/dummy_format.py Thu May 10 15:51:28 2018
@@ -0,0 +1,38 @@
+import os
+try:
+    import ConfigParser
+except ImportError:
+    import configparser as ConfigParser
+
+import lit.formats
+import lit.Test
+
+class DummyFormat(lit.formats.FileBasedTest):
+    def execute(self, test, lit_config):
+        # In this dummy format, expect that each test file is actually just a
+        # .ini format dump of the results to report.
+
+        source_path = test.getSourcePath()
+
+        cfg = ConfigParser.ConfigParser()
+        cfg.read(source_path)
+
+        # Create the basic test result.
+        result_code = cfg.get('global', 'result_code')
+        result_output = cfg.get('global', 'result_output')
+        result = lit.Test.Result(getattr(lit.Test, result_code),
+                                 result_output)
+
+        # Load additional metrics.
+        for key,value_str in cfg.items('results'):
+            value = eval(value_str)
+            if isinstance(value, int):
+                metric = lit.Test.IntMetricValue(value)
+            elif isinstance(value, float):
+                metric = lit.Test.RealMetricValue(value)
+            else:
+                raise RuntimeError("unsupported result type")
+            result.addMetric(key, metric)
+
+        return result
+

Added: llvm/trunk/utils/lit/tests/Inputs/xunit-output/lit.cfg
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/Inputs/xunit-output/lit.cfg?rev=332056&view=auto
==============================================================================
--- llvm/trunk/utils/lit/tests/Inputs/xunit-output/lit.cfg (added)
+++ llvm/trunk/utils/lit/tests/Inputs/xunit-output/lit.cfg Thu May 10 15:51:28 2018
@@ -0,0 +1,10 @@
+import site
+site.addsitedir(os.path.dirname(__file__))
+import dummy_format
+
+config.name = 'test-data'
+config.suffixes = ['.ini']
+config.test_format = dummy_format.DummyFormat()
+config.test_source_root = None
+config.test_exec_root = None
+config.target_triple = None

Modified: llvm/trunk/utils/lit/tests/test-output.py
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/test-output.py?rev=332056&r1=332055&r2=332056&view=diff
==============================================================================
--- llvm/trunk/utils/lit/tests/test-output.py (original)
+++ llvm/trunk/utils/lit/tests/test-output.py Thu May 10 15:51:28 2018
@@ -12,8 +12,8 @@
 # CHECK-NEXT:       "value0": 1,
 # CHECK-NEXT:       "value1": 2.3456
 # CHECK-NEXT:     }
-# CHECK:     "name": "test-data :: bad&name.ini",
-# CHECK:     "output": "& < > \""
-
-# CHECK: ]
+# CHECK-NEXT:     "name": "test-data :: metrics.ini",
+# CHECK-NEXT:     "output": "Test passed."
+# CHECK-NEXT:   }
+# CHECK-NEXT: ]
 # CHECK-NEXT: }

Modified: llvm/trunk/utils/lit/tests/xunit-output.py
URL: http://llvm.org/viewvc/llvm-project/llvm/trunk/utils/lit/tests/xunit-output.py?rev=332056&r1=332055&r2=332056&view=diff
==============================================================================
--- llvm/trunk/utils/lit/tests/xunit-output.py (original)
+++ llvm/trunk/utils/lit/tests/xunit-output.py Thu May 10 15:51:28 2018
@@ -1,11 +1,13 @@
 # Check xunit output
-# RUN: %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/test-data
+# RUN: %{lit} --xunit-xml-output %t.xunit.xml %{inputs}/xunit-output || true
 # RUN: FileCheck < %t.xunit.xml %s
 
 # CHECK: <?xml version="1.0" encoding="UTF-8" ?>
 # CHECK: <testsuites>
-# CHECK: <testsuite name='test-data' tests='2' failures='0'>
-# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'/>
-# CHECK: <testcase classname='test-data.test-data' name='metrics.ini' time='{{[0-1]}}.{{[0-9]+}}'/>
+# CHECK: <testsuite name='test-data' tests='1' failures='1'>
+# CHECK: <testcase classname='test-data.test-data' name='bad&name.ini' time='{{[0-1]}}.{{[0-9]+}}'>
+# CHECK-NEXT: 	<failure >
+# CHECK-NEXT:& < > "
+# CHECK-NEXT:</failure>
 # CHECK: </testsuite>
 # CHECK: </testsuites>




More information about the llvm-commits mailing list