Add summary for xfails with -rxX option (#11574)
Co-authored-by: Brian Okken <1568356+okken@users.noreply.github.com>
sturmf and okken committed Jan 5, 2024
1 parent c2a4a8d commit 13eacda
Showing 5 changed files with 166 additions and 19 deletions.
1 change: 1 addition & 0 deletions AUTHORS
@@ -138,6 +138,7 @@ Erik Hasse
 Erik M. Bray
 Evan Kepner
 Evgeny Seliverstov
+Fabian Sturm
 Fabien Zarifian
 Fabio Zadrozny
 Felix Hofstätter
5 changes: 5 additions & 0 deletions changelog/11233.feature.rst
@@ -0,0 +1,5 @@
+Improvements to how ``-r`` reports xfailures and xpasses:
+
+* Report tracebacks for xfailures when ``-rx`` is set.
+* Report captured output for xpasses when ``-rX`` is set.
+* For xpasses, add a ``-`` in the summary between the test name and the reason, to match how xfail is displayed.
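Illustratively, with a hypothetical test module like the one below (file and test names are made up for this sketch, not part of the commit), running ``pytest -rxX`` would now print the xfailure's traceback under an XFAILURES section, the xpass's captured output under an XPASSES section, and a ``-`` before each reason in the short test summary (e.g. ``XFAIL test_example.py::test_known_bug - known bug``).

# test_example.py -- hypothetical module, for illustration only
import pytest

@pytest.mark.xfail(reason="known bug")
def test_known_bug():
    assert 1 == 2  # with -rx, this traceback is now shown under XFAILURES

@pytest.mark.xfail(reason="fixed upstream")
def test_unexpected_pass():
    print("captured output")  # with -rX, this output is now shown under XPASSES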
58 changes: 40 additions & 18 deletions src/_pytest/terminal.py
@@ -878,8 +878,10 @@ def pytest_sessionfinish(
     def pytest_terminal_summary(self) -> Generator[None, None, None]:
         self.summary_errors()
         self.summary_failures()
+        self.summary_xfailures()
         self.summary_warnings()
         self.summary_passes()
+        self.summary_xpasses()
         try:
             return (yield)
         finally:
@@ -1009,12 +1011,20 @@ def collapsed_location_report(reports: List[WarningReport]) -> str:
             )
 
     def summary_passes(self) -> None:
+        self.summary_passes_combined("passed", "PASSES", "P")
+
+    def summary_xpasses(self) -> None:
+        self.summary_passes_combined("xpassed", "XPASSES", "X")
+
+    def summary_passes_combined(
+        self, which_reports: str, sep_title: str, needed_opt: str
+    ) -> None:
         if self.config.option.tbstyle != "no":
-            if self.hasopt("P"):
-                reports: List[TestReport] = self.getreports("passed")
+            if self.hasopt(needed_opt):
+                reports: List[TestReport] = self.getreports(which_reports)
                 if not reports:
                     return
-                self.write_sep("=", "PASSES")
+                self.write_sep("=", sep_title)
                 for rep in reports:
                     if rep.sections:
                         msg = self._getfailureheadline(rep)
@@ -1048,21 +1058,30 @@ def print_teardown_sections(self, rep: TestReport) -> None:
                 self._tw.line(content)
 
     def summary_failures(self) -> None:
+        self.summary_failures_combined("failed", "FAILURES")
+
+    def summary_xfailures(self) -> None:
+        self.summary_failures_combined("xfailed", "XFAILURES", "x")
+
+    def summary_failures_combined(
+        self, which_reports: str, sep_title: str, needed_opt: Optional[str] = None
+    ) -> None:
         if self.config.option.tbstyle != "no":
-            reports: List[BaseReport] = self.getreports("failed")
-            if not reports:
-                return
-            self.write_sep("=", "FAILURES")
-            if self.config.option.tbstyle == "line":
-                for rep in reports:
-                    line = self._getcrashline(rep)
-                    self.write_line(line)
-            else:
-                for rep in reports:
-                    msg = self._getfailureheadline(rep)
-                    self.write_sep("_", msg, red=True, bold=True)
-                    self._outrep_summary(rep)
-                    self._handle_teardown_sections(rep.nodeid)
+            if not needed_opt or self.hasopt(needed_opt):
+                reports: List[BaseReport] = self.getreports(which_reports)
+                if not reports:
+                    return
+                self.write_sep("=", sep_title)
+                if self.config.option.tbstyle == "line":
+                    for rep in reports:
+                        line = self._getcrashline(rep)
+                        self.write_line(line)
+                else:
+                    for rep in reports:
+                        msg = self._getfailureheadline(rep)
+                        self.write_sep("_", msg, red=True, bold=True)
+                        self._outrep_summary(rep)
+                        self._handle_teardown_sections(rep.nodeid)
 
     def summary_errors(self) -> None:
         if self.config.option.tbstyle != "no":
@@ -1168,8 +1187,11 @@ def show_xpassed(lines: List[str]) -> None:
                     verbose_word, **{_color_for_type["warnings"]: True}
                 )
                 nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
+                line = f"{markup_word} {nodeid}"
                 reason = rep.wasxfail
-                lines.append(f"{markup_word} {nodeid} {reason}")
+                if reason:
+                    line += " - " + str(reason)
+                lines.append(line)
 
         def show_skipped(lines: List[str]) -> None:
             skipped: List[CollectReport] = self.stats.get("skipped", [])
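The terminal.py change above follows a single pattern: the per-category summary writers are folded into shared helpers keyed by the report category, the section title, and the ``-r`` character that gates the section. A minimal standalone sketch of that dispatch (simplified types and a made-up ``reports_by_category`` stand-in for the reporter's stats, not pytest's actual API) could look like:

from typing import Dict, List, Optional

# Hypothetical stand-ins for the reporter's state, for illustration only.
reports_by_category: Dict[str, List[str]] = {
    "failed": ["test_a"],
    "xfailed": ["test_b"],
    "passed": [],
    "xpassed": ["test_c"],
}
enabled_chars = set("xX")  # characters requested via -r


def summary_combined(
    which_reports: str, sep_title: str, needed_opt: Optional[str] = None
) -> None:
    # Mirrors the gating in the diff: xfail/xpass sections are written only
    # when their -r character was requested; FAILURES has no gate.
    if needed_opt is not None and needed_opt not in enabled_chars:
        return
    reports = reports_by_category.get(which_reports, [])
    if not reports:
        return
    print(f"=== {sep_title} ===")
    for nodeid in reports:
        print(nodeid)


# Called in the same relative order as in the updated pytest_terminal_summary hook.
summary_combined("failed", "FAILURES")
summary_combined("xfailed", "XFAILURES", "x")
summary_combined("passed", "PASSES", "P")
summary_combined("xpassed", "XPASSES", "X")

With ``enabled_chars = set("xX")`` the sketch prints FAILURES, XFAILURES, and XPASSES sections but skips PASSES, which is how the gating behaves when only ``-rxX`` is requested.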
2 changes: 1 addition & 1 deletion testing/test_skipping.py
@@ -649,7 +649,7 @@ def test_foo():
     result.stdout.fnmatch_lines(
         [
             "*test_strict_xfail*",
-            "XPASS test_strict_xfail.py::test_foo unsupported feature",
+            "XPASS test_strict_xfail.py::test_foo - unsupported feature",
         ]
     )
     assert result.ret == (1 if strict else 0)
119 changes: 119 additions & 0 deletions testing/test_terminal.py
@@ -2619,3 +2619,122 @@ def test_format_trimmed() -> None:

     assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
     assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "
+
+
+def test_summary_xfail_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            assert False
+
+        @pytest.mark.xfail(reason="foo")
+        def test_xfail_reason():
+            assert False
+        """
+    )
+    result = pytester.runpytest("-rx")
+    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail"
+    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_summary_xfail_tb(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*_ test_xfail _*",
+            "* @pytest.mark.xfail*",
+            "* def test_xfail():*",
+            "* a, b = 1, 2*",
+            "> *assert a == b*",
+            "E *assert 1 == 2*",
+            "test_summary_xfail_tb.py:6: AssertionError*",
+            "*= short test summary info =*",
+            "XFAIL test_summary_xfail_tb.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_xfail_tb_line(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_xfail():
+            a, b = 1, 2
+            assert a == b
+        """
+    )
+    result = pytester.runpytest("-rx", "--tb=line")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XFAILURES =*",
+            "*test_xfail_tb_line.py:6: assert 1 == 2",
+            "*= short test summary info =*",
+            "XFAIL test_xfail_tb_line.py::test_xfail",
+            "*= 1 xfailed in * =*",
+        ]
+    )
+
+
+def test_summary_xpass_reason(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            ...
+
+        @pytest.mark.xfail(reason="foo")
+        def test_reason():
+            ...
+        """
+    )
+    result = pytester.runpytest("-rX")
+    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
+    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
+    result.stdout.fnmatch_lines([expect1, expect2])
+    assert result.stdout.lines.count(expect1) == 1
+    assert result.stdout.lines.count(expect2) == 1
+
+
+def test_xpass_output(pytester: Pytester) -> None:
+    pytester.makepyfile(
+        """
+        import pytest
+
+        @pytest.mark.xfail
+        def test_pass():
+            print('hi there')
+        """
+    )
+    result = pytester.runpytest("-rX")
+    result.stdout.fnmatch_lines(
+        [
+            "*= XPASSES =*",
+            "*_ test_pass _*",
+            "*- Captured stdout call -*",
+            "*= short test summary info =*",
+            "XPASS test_xpass_output.py::test_pass*",
+            "*= 1 xpassed in * =*",
+        ]
+    )
