Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Assert JSON result of test files #567

Merged
merged 4 commits into from
Feb 3, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
89 changes: 89 additions & 0 deletions python/test/files/dart/json/tests.results.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
{
"title": "2 errors, 1 fail, 1 skipped, 16 pass in 0s",
"summary": "20 tests   16 :white_check_mark:  0s :stopwatch:\n 4 suites   1 :zzz:\n 1 files     1 :x:  2 :fire:\n\nResults for commit commit s.\n",
"conclusion": "failure",
"stats": {
"files": 1,
"errors": [],
"suites": 4,
"duration": 0,
"tests": 20,
"tests_succ": 16,
"tests_skip": 1,
"tests_fail": 1,
"tests_error": 2,
"runs": 20,
"runs_succ": 16,
"runs_skip": 1,
"runs_fail": 1,
"runs_error": 2,
"commit": "commit sha"
},
"annotations": [
{
"path": "file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart",
"start_line": 21,
"end_line": 21,
"annotation_level": "warning",
"message": "json/tests.json [took 0s]",
"title": "Cli runner should have correct invocation failed",
"raw_details": "Expected: 'metrics <command> [arguments] <directories> nope'\n Actual: 'metrics <command> [arguments] <directories>'\n Which: is different. Both strings start the same, but the actual value is missing the following trailing characters: nope\n\npackage:test_api expect\ntest/src/cli/cli_runner_test.dart 22:7 main.<fn>.<fn>"
},
{
"path": "file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart",
"start_line": 16,
"end_line": 16,
"annotation_level": "failure",
"message": "json/tests.json [took 0s]",
"title": "detectSdkPath should return `null` if running inside VM with error",
"raw_details": "Exception: exception\ntest/src/cli/utils/detect_sdk_path_test.dart 21:7 main.<fn>.<fn>"
},
{
"path": "file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart",
"start_line": 46,
"end_line": 46,
"annotation_level": "failure",
"message": "json/tests.json [took 0s]",
"title": "detectSdkPath should return null if sdk path is not found inside environment PATH variable with error",
"raw_details": "Instance of 'Error'\ntest/src/cli/utils/detect_sdk_path_test.dart 67:9 main.<fn>.<fn>"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There is 1 skipped test, see \"Raw output\" for the name of the skipped test.",
"title": "1 skipped test found",
"raw_details": "Cli runner should have correct description"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There are 20 tests, see \"Raw output\" for the full list of tests.",
"title": "20 tests found",
"raw_details": "AnalysisOptions readIterableOfString returns iterables with data or not\nAnalysisOptions readMap returns map with data or not\nAnalysisOptions readMapOfMap returns map with data or not\nAnalysisOptions returns correct \"folderPath\" on posix platforms\nCheckUnnecessaryNullableCommand should have correct description\nCheckUnnecessaryNullableCommand should have correct invocation\nCheckUnnecessaryNullableCommand should have correct name\nCheckUnnecessaryNullableCommand should have correct usage\nCli runner run with version argument\nCli runner should have correct description\nCli runner should have correct invocation\nanalysisOptionsFromFile constructs AnalysisOptions from extends config\nanalysisOptionsFromFile constructs AnalysisOptions from invalid file\nanalysisOptionsFromFile constructs AnalysisOptions from null\nanalysisOptionsFromFile constructs AnalysisOptions from valid file with single import\nanalysisOptionsFromFile constructs AnalysisOptions from yaml file\ndetectSdkPath should find sdk path inside environment PATH variable\ndetectSdkPath should return `null` for non-Windows platforms\ndetectSdkPath should return `null` if running inside VM\ndetectSdkPath should return null if sdk path is not found inside environment PATH variable"
}
],
"check_url": "html",
"formatted": {
"stats": {
"files": "1",
"errors": [],
"suites": "4",
"duration": "0",
"tests": "20",
"tests_succ": "16",
"tests_skip": "1",
"tests_fail": "1",
"tests_error": "2",
"runs": "20",
"runs_succ": "16",
"runs_skip": "1",
"runs_fail": "1",
"runs_error": "2",
"commit": "commit sha"
}
}
}
80 changes: 80 additions & 0 deletions python/test/files/junit-xml/bazel/suite-logs.results.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
{
"title": "1 errors in 0s",
"summary": "1 tests   0 :white_check_mark:  0s :stopwatch:\n1 suites  0 :zzz:\n1 files    0 :x:  1 :fire:\n\nResults for commit commit s.\n",
"conclusion": "failure",
"stats": {
"files": 1,
"errors": [],
"suites": 1,
"duration": 0,
"tests": 1,
"tests_succ": 0,
"tests_skip": 0,
"tests_fail": 0,
"tests_error": 1,
"runs": 1,
"runs_succ": 0,
"runs_skip": 0,
"runs_fail": 0,
"runs_error": 1,
"commit": "commit sha"
},
"annotations": [
{
"path": "/",
"start_line": 0,
"end_line": 0,
"annotation_level": "failure",
"message": "bazel/suite-logs.xml [took 0s]",
"title": "bazel/failing_absl_test with error",
"raw_details": "exited with error code 1"
},
{
"path": "bazel/failing_absl_test",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "Test suite bazel/failing_absl_test has the following stdout output (see Raw output).",
"title": "Logging on stdout of test suite bazel/failing_absl_test",
"raw_details": "Generated test.log (if the file is not UTF-8, then this may be unreadable):\nexec ${PAGER:-/usr/bin/less} \"$0\" || exit 1\nExecuting tests from //bazel:failing_absl_test\n-----------------------------------------------------------------------------\nTraceback (most recent call last):\n File \"<reducted>\", line 3, in <module>\n import non_existent_package\nModuleNotFoundError: No module named 'non_existent_package'"
},
{
"path": "bazel/failing_absl_test",
"start_line": 0,
"end_line": 0,
"annotation_level": "warning",
"message": "Test suite bazel/failing_absl_test has the following stderr output (see Raw output).",
"title": "Logging on stderr of test suite bazel/failing_absl_test",
"raw_details": "Generated test.err"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There is 1 test, see \"Raw output\" for the name of the test.",
"title": "1 test found",
"raw_details": "bazel/failing_absl_test"
}
],
"check_url": "html",
"formatted": {
"stats": {
"files": "1",
"errors": [],
"suites": "1",
"duration": "0",
"tests": "1",
"tests_succ": "0",
"tests_skip": "0",
"tests_fail": "0",
"tests_error": "1",
"runs": "1",
"runs_succ": "0",
"runs_skip": "0",
"runs_fail": "0",
"runs_error": "1",
"commit": "commit sha"
}
}
}
53 changes: 53 additions & 0 deletions python/test/files/junit-xml/jest/jest-junit.results.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
{
"title": "All 2 tests pass in 0s",
"summary": "2 tests   2 :white_check_mark:  0s :stopwatch:\n1 suites  0 :zzz:\n1 files    0 :x:\n\nResults for commit commit s.\n",
"conclusion": "success",
"stats": {
"files": 1,
"errors": [],
"suites": 1,
"duration": 0,
"tests": 2,
"tests_succ": 2,
"tests_skip": 0,
"tests_fail": 0,
"tests_error": 0,
"runs": 2,
"runs_succ": 2,
"runs_skip": 0,
"runs_fail": 0,
"runs_error": 0,
"commit": "commit sha"
},
"annotations": [
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There are 2 tests, see \"Raw output\" for the full list of tests.",
"title": "2 tests found",
"raw_details": "Load widget via link\nMount iframe"
}
],
"check_url": "html",
"formatted": {
"stats": {
"files": "1",
"errors": [],
"suites": "1",
"duration": "0",
"tests": "2",
"tests_succ": "2",
"tests_skip": "0",
"tests_fail": "0",
"tests_error": "0",
"runs": "2",
"runs_succ": "2",
"runs_skip": "0",
"runs_fail": "0",
"runs_error": "0",
"commit": "commit sha"
}
}
}
80 changes: 80 additions & 0 deletions python/test/files/junit-xml/junit.multiresult.results.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
{
"title": "1 errors, 1 fail, 1 skipped, 1 pass in 1s",
"summary": "1 files  1 suites   1s :stopwatch:\n4 tests 1 :white_check_mark: 1 :zzz: 1 :x: 1 :fire:\n4 runs  -2 :white_check_mark: 3 :zzz: 2 :x: 1 :fire:\n\nResults for commit commit s.\n",
"conclusion": "failure",
"stats": {
"files": 1,
"errors": [],
"suites": 1,
"duration": 1,
"tests": 4,
"tests_succ": 1,
"tests_skip": 1,
"tests_fail": 1,
"tests_error": 1,
"runs": 4,
"runs_succ": -2,
"runs_skip": 3,
"runs_fail": 2,
"runs_error": 1,
"commit": "commit sha"
},
"annotations": [
{
"path": "test class",
"start_line": 0,
"end_line": 0,
"annotation_level": "failure",
"message": "junit.multiresult.xml [took 0s]",
"title": "test that errors (test class) with error",
"raw_details": "test teardown failure\nstdout"
},
{
"path": "test class",
"start_line": 0,
"end_line": 0,
"annotation_level": "warning",
"message": "junit.multiresult.xml [took 0s]",
"title": "test that fails (test class) failed",
"raw_details": "test failure\nAssertion failed"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There is 1 skipped test, see \"Raw output\" for the name of the skipped test.",
"title": "1 skipped test found",
"raw_details": "test class ‑ test that is skipped"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There are 4 tests, see \"Raw output\" for the full list of tests.",
"title": "4 tests found",
"raw_details": "test class ‑ test that errors\ntest class ‑ test that fails\ntest class ‑ test that is skipped\ntest class ‑ test that succeeds"
}
],
"check_url": "html",
"formatted": {
"stats": {
"files": "1",
"errors": [],
"suites": "1",
"duration": "1",
"tests": "4",
"tests_succ": "1",
"tests_skip": "1",
"tests_fail": "1",
"tests_error": "1",
"runs": "4",
"runs_succ": "-2",
"runs_skip": "3",
"runs_fail": "2",
"runs_error": "1",
"commit": "commit sha"
}
}
}
78 changes: 78 additions & 0 deletions python/test/files/junit-xml/minimal-attributes.results.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
{
"title": "1 errors, 1 fail, 1 skipped, 1 pass in 0s",
"summary": "4 tests   1 :white_check_mark:  0s :stopwatch:\n1 suites  1 :zzz:\n1 files    1 :x:  1 :fire:\n\nResults for commit commit s.\n",
"conclusion": "failure",
"stats": {
"files": 1,
"errors": [],
"suites": 1,
"duration": 0,
"tests": 4,
"tests_succ": 1,
"tests_skip": 1,
"tests_fail": 1,
"tests_error": 1,
"runs": 4,
"runs_succ": 1,
"runs_skip": 1,
"runs_fail": 1,
"runs_error": 1,
"commit": "commit sha"
},
"annotations": [
{
"path": "ClassName",
"start_line": 0,
"end_line": 0,
"annotation_level": "warning",
"message": "minimal-attributes.xml",
"title": "failed_test (ClassName) failed"
},
{
"path": "ClassName",
"start_line": 0,
"end_line": 0,
"annotation_level": "failure",
"message": "minimal-attributes.xml",
"title": "error_test (ClassName) with error"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There is 1 skipped test, see \"Raw output\" for the name of the skipped test.",
"title": "1 skipped test found",
"raw_details": "ClassName ‑ skipped_test"
},
{
"path": ".github",
"start_line": 0,
"end_line": 0,
"annotation_level": "notice",
"message": "There are 4 tests, see \"Raw output\" for the full list of tests.",
"title": "4 tests found",
"raw_details": "ClassName ‑ error_test\nClassName ‑ failed_test\nClassName ‑ skipped_test\nClassName ‑ test_name"
}
],
"check_url": "html",
"formatted": {
"stats": {
"files": "1",
"errors": [],
"suites": "1",
"duration": "0",
"tests": "4",
"tests_succ": "1",
"tests_skip": "1",
"tests_fail": "1",
"tests_error": "1",
"runs": "4",
"runs_succ": "1",
"runs_skip": "1",
"runs_fail": "1",
"runs_error": "1",
"commit": "commit sha"
}
}
}
Loading
Loading