Fix verbose output reporting on Linux
nicoddemus committed Apr 4, 2019
1 parent 38cc3cc commit 95d26b6
Showing 3 changed files with 119 additions and 11 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.rst
@@ -1,6 +1,13 @@
CHANGELOG
=========

0.2.1 (2019-04-04)
------------------

* Fix verbose output reporting on Linux (`#7`_).

.. _#7: https://github.com/pytest-dev/pytest-subtests/issues/7

0.2.0 (2019-04-03)
------------------

19 changes: 17 additions & 2 deletions pytest_subtests.py
@@ -1,3 +1,4 @@
import sys
from contextlib import contextmanager
from time import time

@@ -77,16 +78,29 @@ def pytest_unconfigure():
if hasattr(TestCaseFunction, "failfast"):
del TestCaseFunction.failfast

if sys.version_info[:2] < (3, 7):
@contextmanager
def nullcontext():
yield
else:
from contextlib import nullcontext


@pytest.fixture
def subtests(request):
yield SubTests(request.node.ihook, request.node)
capmam = request.node.config.pluginmanager.get_plugin('capturemanager')
if capmam is not None:
suspend_capture_ctx = capmam.global_and_fixture_disabled
else:
suspend_capture_ctx = nullcontext
yield SubTests(request.node.ihook, request.node, suspend_capture_ctx)


@attr.s
class SubTests(object):
ihook = attr.ib()
item = attr.ib()
suspend_capture_ctx = attr.ib()

@contextmanager
def test(self, msg=None, **kwargs):
@@ -100,7 +114,8 @@ def test(self, msg=None, **kwargs):
call_info = CallInfo(None, exc_info, start, stop, when="call")
sub_report = SubTestReport.from_item_and_call(item=self.item, call=call_info)
sub_report.context = SubTestContext(msg, kwargs.copy())
self.ihook.pytest_runtest_logreport(report=sub_report)
with self.suspend_capture_ctx():
self.ihook.pytest_runtest_logreport(report=sub_report)


def pytest_report_to_serializable(report):
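For context, a minimal sketch of the approach this file takes after the commit (assembled from the diff above; get_suspend_ctx is a hypothetical helper name, not part of the plugin): suspend pytest's output capturing while each subtest report is written, falling back to a no-op context manager on Python < 3.7 where contextlib.nullcontext is unavailable.

    import sys
    from contextlib import contextmanager

    if sys.version_info[:2] < (3, 7):
        @contextmanager
        def nullcontext():
            # no-op stand-in for contextlib.nullcontext on Python 3.6
            yield
    else:
        from contextlib import nullcontext


    def get_suspend_ctx(config):
        # Hypothetical helper illustrating the lookup done in the subtests fixture:
        # use the CaptureManager's global_and_fixture_disabled() context so verbose
        # progress lines reach the terminal, or a no-op when capture is unavailable.
        capman = config.pluginmanager.get_plugin("capturemanager")
        return capman.global_and_fixture_disabled if capman is not None else nullcontext

The fixture then passes this context factory into SubTests, which enters it around each pytest_runtest_logreport call.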
104 changes: 95 additions & 9 deletions tests/test_subtests.py
@@ -1,13 +1,16 @@
import sys

import pytest


@pytest.mark.parametrize("mode", ["normal", "xdist"])
class TestFixture:
"""
TODO: test skips, xfails
Tests for ``subtests`` fixture.
"""

def test_simple_terminal_out(self, testdir, mode):
@pytest.fixture
def simple_script(self, testdir):
testdir.makepyfile(
"""
def test_foo(subtests):
@@ -16,6 +19,8 @@ def test_foo(subtests):
assert i % 2 == 0
"""
)

def test_simple_terminal_normal(self, simple_script, testdir, mode):
if mode == "normal":
result = testdir.runpytest()
expected_lines = ["collected 1 item"]
@@ -31,6 +36,38 @@ def test_foo(subtests):
]
result.stdout.fnmatch_lines(expected_lines)

def test_simple_terminal_verbose(self, simple_script, testdir, mode):
if mode == "normal":
result = testdir.runpytest("-v")
expected_lines = [
"*collected 1 item",
"test_simple_terminal_verbose.py::test_foo PASSED *100%*",
"test_simple_terminal_verbose.py::test_foo FAILED *100%*",
"test_simple_terminal_verbose.py::test_foo PASSED *100%*",
"test_simple_terminal_verbose.py::test_foo FAILED *100%*",
"test_simple_terminal_verbose.py::test_foo PASSED *100%*",
"test_simple_terminal_verbose.py::test_foo PASSED *100%*",
]
else:
pytest.importorskip("xdist")
result = testdir.runpytest("-n1", "-v")
expected_lines = [
"gw0 [1]",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
"*gw0*100%* test_simple_terminal_verbose.py::test_foo*",
]

expected_lines += [
"* test_foo [[]custom[]] (i=1) *",
"* test_foo [[]custom[]] (i=3) *",
"* 2 failed, 1 passed in *",
]
result.stdout.fnmatch_lines(expected_lines)

def test_skip(self, testdir, mode):
testdir.makepyfile(
"""
@@ -55,12 +92,12 @@ def test_foo(subtests):

class TestSubTest:
"""
# TODO: test skips, xfails
Test Test.subTest functionality.
"""

@pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
def test_simple_terminal_out(self, testdir, runner):
p = testdir.makepyfile(
@pytest.fixture
def simple_script(self, testdir):
return testdir.makepyfile(
"""
from unittest import TestCase, main
@@ -75,8 +112,12 @@ def test_foo(self):
main()
"""
)

@pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
def test_simple_terminal_normal(self, simple_script, testdir, runner):

if runner == "unittest":
result = testdir.runpython(p)
result = testdir.run(sys.executable, simple_script)
result.stderr.fnmatch_lines(
[
"FAIL: test_foo (__main__.T) [custom] (i=1)",
@@ -89,11 +130,11 @@ def test_foo(self):
)
else:
if runner == "pytest-normal":
result = testdir.runpytest(p)
result = testdir.runpytest(simple_script)
expected_lines = ["collected 1 item"]
else:
pytest.importorskip("xdist")
result = testdir.runpytest("-n1")
result = testdir.runpytest(simple_script, "-n1")
expected_lines = ["gw0 [1]"]
result.stdout.fnmatch_lines(
expected_lines
@@ -106,6 +147,51 @@ def test_foo(self):
]
)

@pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
def test_simple_terminal_verbose(self, simple_script, testdir, runner):

if runner == "unittest":
result = testdir.run(sys.executable, simple_script, "-v")
result.stderr.fnmatch_lines(
[
"test_foo (__main__.T) ... ",
"FAIL: test_foo (__main__.T) [custom] (i=1)",
"AssertionError: 1 != 0",
"FAIL: test_foo (__main__.T) [custom] (i=3)",
"AssertionError: 1 != 0",
"Ran 1 test in *",
"FAILED (failures=2)",
]
)
else:
if runner == "pytest-normal":
result = testdir.runpytest(simple_script, "-v")
expected_lines = [
"*collected 1 item",
"test_simple_terminal_verbose.py::T::test_foo FAILED *100%*",
"test_simple_terminal_verbose.py::T::test_foo FAILED *100%*",
"test_simple_terminal_verbose.py::T::test_foo PASSED *100%*",
]
else:
pytest.importorskip("xdist")
result = testdir.runpytest(simple_script, "-n1", "-v")
expected_lines = [
"gw0 [1]",
"*gw0*100%* FAILED test_simple_terminal_verbose.py::T::test_foo*",
"*gw0*100%* FAILED test_simple_terminal_verbose.py::T::test_foo*",
"*gw0*100%* PASSED test_simple_terminal_verbose.py::T::test_foo*",
]
result.stdout.fnmatch_lines(
expected_lines
+ [
"* T.test_foo [[]custom[]] (i=1) *",
"E * AssertionError: 1 != 0",
"* T.test_foo [[]custom[]] (i=3) *",
"E * AssertionError: 1 != 0",
"* 2 failed, 1 passed in *",
]
)

@pytest.mark.parametrize("runner", ["unittest", "pytest-normal", "pytest-xdist"])
def test_skip(self, testdir, runner):
p = testdir.makepyfile(
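As a point of reference, here is a stand-alone example of the behavior the new verbose tests exercise (hypothetical file name; the body mirrors what the collapsed simple_script fixture appears to contain). With this fix, pytest -v prints a PASSED/FAILED progress line for each subtest rather than misreporting them on Linux:

    # test_example.py -- hypothetical example, not part of this commit
    def test_foo(subtests):
        for i in range(5):
            with subtests.test(msg="custom", i=i):
                # odd values of i fail, producing the "[custom] (i=1)" and "(i=3)" lines
                assert i % 2 == 0

    # invoke with: pytest -v test_example.py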
