diff --git a/publish_unit_test_results.py b/publish_unit_test_results.py
index 9fff0546..9afe0f29 100644
--- a/publish_unit_test_results.py
+++ b/publish_unit_test_results.py
@@ -36,17 +36,22 @@ def int_opt(string: str) -> Optional[int]:
except ValueError:
return None
- cases = [dict(
- result_file=result_file,
- test_file=case._elem.get('file'),
- line=int_opt(case._elem.get('line')),
- class_name=case.classname,
- test_name=case.name,
- result=case.result._tag if case.result else 'success',
- message=unescape(case.result.message) if case.result else None,
- content=unescape(case.result._elem.text) if case.result else None,
- time=case.time
- ) for result_file, suite in suites for case in suite]
+ cases = [
+ dict(
+ result_file=result_file,
+ test_file=case._elem.get('file'),
+ line=int_opt(case._elem.get('line')),
+ class_name=case.classname,
+ test_name=case.name,
+ result=case.result._tag if case.result else 'success',
+ message=unescape(case.result.message) if case.result and case.result.message is not None else None,
+ content=unescape(case.result._elem.text) if case.result and case.result._elem.text is not None else None,
+ time=case.time
+ )
+ for result_file, suite in suites
+ for case in suite
+ if case.classname is not None and case.name is not None
+ ]
return dict(files=len(files),
# test states and counts from suites
@@ -64,7 +69,7 @@ def get_test_results(parsed_results: Dict[Any, Any], dedup_classes_by_file_name:
cases_skipped = [case for case in cases if case.get('result') == 'skipped']
cases_failures = [case for case in cases if case.get('result') == 'failure']
cases_errors = [case for case in cases if case.get('result') == 'error']
- cases_time = sum([case.get('time') for case in cases])
+ cases_time = sum([case.get('time') or 0 for case in cases])
# group cases by tests
cases_results = defaultdict(lambda: defaultdict(list))
diff --git a/test/files/minimal-attributes.xml b/test/files/minimal-attributes.xml
new file mode 100644
index 00000000..2d99719c
--- /dev/null
+++ b/test/files/minimal-attributes.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<testsuites>
+    <testsuite name="minimal-attributes" tests="4" failures="1" errors="1" skipped="1" time="0">
+        <testcase classname="ClassName" name="test_name"/>
+        <testcase classname="ClassName" name="skipped_test">
+            <skipped/>
+        </testcase>
+        <testcase classname="ClassName" name="failed_test">
+            <failure/>
+        </testcase>
+        <testcase classname="ClassName" name="error_test">
+            <error/>
+        </testcase>
+    </testsuite>
+</testsuites>
\ No newline at end of file
diff --git a/test/files/no-attributes.xml b/test/files/no-attributes.xml
new file mode 100644
index 00000000..e260ae8d
--- /dev/null
+++ b/test/files/no-attributes.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<testsuites>
+    <testsuite name="no-attributes" tests="4" failures="1" errors="1" skipped="1" time="0">
+        <testcase/>
+        <testcase>
+            <skipped/>
+        </testcase>
+        <testcase>
+            <failure/>
+        </testcase>
+        <testcase>
+            <error/>
+        </testcase>
+    </testsuite>
+</testsuites>
\ No newline at end of file
diff --git a/test/test_publish.py b/test/test_publish.py
index 654f7a1f..6d512384 100644
--- a/test/test_publish.py
+++ b/test/test_publish.py
@@ -186,7 +186,6 @@ def test_parse_junit_xml_files(self):
time=1.898
)
]))
- self.maxDiff=None
self.assertEqual(parse_junit_xml_files(['files/junit.fail.xml']),
dict(
cases=[
@@ -279,6 +278,73 @@ def test_parse_junit_xml_files(self):
suite_time=2,
suites=1
))
+ self.assertEqual(parse_junit_xml_files(['files/minimal-attributes.xml']),
+ dict(
+ cases=[
+ dict(
+ class_name='ClassName',
+ content=None,
+ result_file='files/minimal-attributes.xml',
+ test_file=None,
+ line=None,
+ message=None,
+ result='success',
+ test_name='test_name',
+ time=None
+ ),
+ dict(
+ class_name='ClassName',
+ content=None,
+ result_file='files/minimal-attributes.xml',
+ test_file=None,
+ line=None,
+ message=None,
+ result='skipped',
+ test_name='skipped_test',
+ time=None
+ ),
+ dict(
+ class_name='ClassName',
+ content=None,
+ result_file='files/minimal-attributes.xml',
+ test_file=None,
+ line=None,
+ message=None,
+ result='failure',
+ test_name='failed_test',
+ time=None
+ ),
+ dict(
+ class_name='ClassName',
+ content=None,
+ result_file='files/minimal-attributes.xml',
+ test_file=None,
+ line=None,
+ message=None,
+ result='error',
+ test_name='error_test',
+ time=None
+ )
+ ],
+ files=1,
+ suite_errors=1,
+ suite_failures=1,
+ suite_skipped=1,
+ suite_tests=4,
+ suite_time=0,
+ suites=1
+ ))
+ self.assertEqual(parse_junit_xml_files(['files/no-attributes.xml']),
+ dict(
+ cases=[],
+ files=1,
+ suite_errors=1,
+ suite_failures=1,
+ suite_skipped=1,
+ suite_tests=4,
+ suite_time=0,
+ suites=1
+ ))
def test_get_test_results(self):
self.assertEqual(get_test_results(dict(cases=[]), False), dict(
@@ -354,7 +420,6 @@ def test_get_test_results(self):
dict(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test5', result='error', time=10),
])
- self.maxDiff = None
self.assertEqual(get_test_results(with_duplicates, False), dict(
cases=10, cases_skipped=3, cases_failures=1, cases_errors=1, cases_time=55,
case_results=dict([
@@ -383,6 +448,20 @@ def test_get_test_results(self):
tests=10, tests_skipped=3, tests_failures=1, tests_errors=1,
))
+ self.assertEqual(get_test_results(dict(cases=[
+ dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='success', time=1),
+ dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='skipped', time=None),
+ dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='failure', time=2),
+ dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='skipped', time=None),
+ ]), False), dict(
+ cases=4, cases_skipped=2, cases_failures=1, cases_errors=0, cases_time=3,
+ case_results=dict([
+ ((None, 'class', 'test1'), dict(success=[dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='success', time=1)], skipped=[dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='skipped', time=None)])),
+ ((None, 'class', 'test2'), dict(failure=[dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='failure', time=2)], skipped=[dict(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='skipped', time=None)])),
+ ]),
+ tests=2, tests_skipped=0, tests_failures=1, tests_errors=0,
+ ))
+
def test_get_stats(self):
self.assertEqual(get_stats(dict()), dict(
files=None,
@@ -917,6 +996,20 @@ def test_get_case_messages(self):
('error', list([
dict(class_name='class1', test_name='test1', file='file1', result='error', message='message5', content='content5'),
])),
+ ])),
+ ('class2::test2', dict([
+ ('success', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None)
+ ])),
+ ('skipped', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None)
+ ])),
+ ('failure', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None)
+ ])),
+ ('error', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None)
+ ])),
]))
])
@@ -950,6 +1043,28 @@ def test_get_case_messages(self):
dict(class_name='class1', test_name='test1', file='file1', result='error', message='message5', content='content5'),
])),
])),
+ ])),
+ ('class2::test2', dict([
+ ('success', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None)
+ ])),
+ ])),
+ ('skipped', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None)
+ ])),
+ ])),
+ ('failure', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None)
+ ])),
+ ])),
+ ('error', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None)
+ ])),
+ ])),
]))
])
@@ -984,6 +1099,28 @@ def test_get_annotation(self):
dict(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='error', message='message6')
]))
])),
+ ])),
+ ('class2::test2', dict([
+ ('success', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None)
+ ])),
+ ])),
+ ('skipped', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None)
+ ])),
+ ])),
+ ('failure', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None)
+ ])),
+ ])),
+ ('error', dict([
+ (None, list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None)
+ ])),
+ ])),
]))
])
@@ -992,6 +1129,10 @@ def test_get_annotation(self):
self.assertEqual(dict(path='file1', start_line=123, end_line=123, annotation_level='warning', message='result-file1\nresult-file2\nresult-file3', title='3 out of 6 runs failed: test1 (class1)', raw_details='message4'), get_annotation(messages, 'class1::test1', 'failure', 'message4', report_individual_runs=False))
self.assertEqual(dict(path='file1', start_line=123, end_line=123, annotation_level='failure', message='result-file1', title='1 out of 6 runs with error: test1 (class1)', raw_details='message5'), get_annotation(messages, 'class1::test1', 'error', 'message5', report_individual_runs=False))
+ self.assertEqual(dict(path='class2', start_line=0, end_line=0, annotation_level='notice', message='result-file1', title='1 out of 4 runs skipped: test2 (class2)', raw_details=None), get_annotation(messages, 'class2::test2', 'skipped', None, report_individual_runs=False))
+ self.assertEqual(dict(path='class2', start_line=0, end_line=0, annotation_level='warning', message='result-file1', title='1 out of 4 runs failed: test2 (class2)', raw_details=None), get_annotation(messages, 'class2::test2', 'failure', None, report_individual_runs=False))
+ self.assertEqual(dict(path='class2', start_line=0, end_line=0, annotation_level='failure', message='result-file1', title='1 out of 4 runs with error: test2 (class2)', raw_details=None), get_annotation(messages, 'class2::test2', 'error', None, report_individual_runs=False))
+
def test_get_annotation_report_individual_runs(self):
messages = dict([
('class1::test1', dict([
@@ -1044,6 +1185,20 @@ def test_get_annotations(self):
('error', list([
dict(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='error', message='error message', content='error content')
])),
+ ])),
+ ('class2::test2', dict([
+ ('success', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None)
+ ])),
+ ('skipped', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None)
+ ])),
+ ('failure', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None)
+ ])),
+ ('error', list([
+ dict(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None)
+ ])),
]))
])
@@ -1064,7 +1219,24 @@ def test_get_annotations(self):
start_line=123,
title='1 out of 6 runs with error: test1 (class1)',
raw_details='error content'
- )
+ ),
+ dict(
+ annotation_level='warning',
+ end_line=0,
+ message='result-file1',
+ path='class2',
+ start_line=0,
+ title='1 out of 4 runs failed: test2 (class2)',
+ raw_details=None
+ ), dict(
+ annotation_level='failure',
+ end_line=0,
+ message='result-file1',
+ path='class2',
+ start_line=0,
+ title='1 out of 4 runs with error: test2 (class2)',
+ raw_details=None
+ ),
]
annotations = get_annotations(results, report_individual_runs=False)