Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make pytest-adaptavist compatible with pytest 8 #61

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,18 @@ All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Breaking change

* The parameters test_run_name and test_plan_name, deprecated in 5.6.0, have been removed. Please use test-cycle-name and test-plan-name instead.
* [Following pytest 7](https://docs.pytest.org/en/stable/deprecations.html#passing-msg-to-pytest-skip-pytest-fail-or-pytest-exit), pytest.block now requires the keyword argument reason instead of msg.

### Changed

* pytest 7 is now the minimum required version.
* pytest 8 is supported.

## [5.8.0] - 2022/10/13

### Added
Expand Down
16 changes: 2 additions & 14 deletions pytest_adaptavist/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,18 +66,6 @@ def add_option_ini(
default="origin/master",
help="Branch to restrict to (default: origin/master).",
)
add_option_ini(
"--test_run_name",
dest="test_run_name",
default=TEST_CYCLE_NAME_DEFAULT,
help="Specify test run name (default: <project_key> <test_run_suffix>).",
) # deprecated
add_option_ini(
"--test_plan_name",
dest="test_plan_name_deprecated",
default=TEST_PLAN_NAME_DEFAULT,
help="Specify test plan name (default: <project_key> <test_plan_suffix>).",
) # deprecated
add_option_ini(
"--test-cycle-name",
dest="test_cycle_name",
Expand Down Expand Up @@ -138,9 +126,9 @@ def pytest_configure(config: Config):

# Support for pytest.block
@_with_exception(Blocked)
def block(msg="") -> NoReturn:
def block(reason="") -> NoReturn:
__tracebackhide__ = True # pylint: disable=unused-variable
raise Blocked(msg=msg)
raise Blocked(msg=reason)

pytest.block = block # type: ignore

Expand Down
33 changes: 4 additions & 29 deletions pytest_adaptavist/_pytest_adaptavist.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
import pytest
from _pytest._io.saferepr import saferepr
from _pytest.config import Config
from _pytest.deprecated import PytestDeprecationWarning
from _pytest.mark.structures import Mark
from _pytest.outcomes import fail
from _pytest.reports import TestReport
Expand All @@ -35,7 +34,6 @@
html_row,
intersection,
)
from .constants import TEST_CYCLE_NAME_DEFAULT, TEST_PLAN_NAME_DEFAULT


class PytestAdaptavist:
Expand All @@ -50,17 +48,6 @@ class PytestAdaptavist:

def __init__(self, config: Config):
self.config = config
if get_option_ini(config, "test_run_name") != TEST_CYCLE_NAME_DEFAULT: # TODO: Remove in pytest-adaptavist 6
config.issue_config_time_warning(
PytestDeprecationWarning("test_run_name is deprecated. Please use --test-cycle-name"), stacklevel=2
)
if (
get_option_ini(config, "test_plan_name_deprecated") != TEST_PLAN_NAME_DEFAULT
): # TODO: Remove in pytest-adaptavist 6
config.issue_config_time_warning(
PytestDeprecationWarning("test_plan_name is deprecated. Please use --test-plan-name"), stacklevel=2
)

self.item_status_info: dict[str, Any] = {}
self.test_refresh_info: dict[str, Any] = {}
self.test_result_data: dict[str, Any] = {}
Expand Down Expand Up @@ -173,7 +160,7 @@ def pytest_runtest_setup(self, item: pytest.Item):
if not skip_status.kwargs.get("reason", ""):
fail("You need to specify a reason when blocking conditionally.", pytrace=False)
elif any(skip_status.args):
pytest.block(msg=skip_status.kwargs["reason"]) # type: ignore
pytest.block(reason=skip_status.kwargs["reason"]) # type: ignore

if skip_status := item.get_closest_marker("block"):
fullname = get_item_nodeid(item)
Expand All @@ -183,7 +170,7 @@ def pytest_runtest_setup(self, item: pytest.Item):
):
skip_reason = self.test_result_data[fullname].get("comment", "")
if skip_status.name == "block":
pytest.block(msg=skip_reason) # type: ignore
pytest.block(reason=skip_reason) # type: ignore

@pytest.hookimpl()
def pytest_runtest_logreport(self, report: TestReport):
Expand Down Expand Up @@ -612,20 +599,8 @@ def _setup_report(self, worker_input: dict[str, Any]):
* New test plans are named like "<project key> <test plan suffix>" (where test plan suffix must be unique)
* New test runs are named like "<test plan name or project key> <test run suffix> <datetime now>"
"""
test_run_name = self._eval_format(
str(
self.config.getini("test_cycle_name")
if self.config.getini("test_cycle_name") != TEST_CYCLE_NAME_DEFAULT
else self.config.getini("test_run_name")
)
) # TODO: Remove 'if' in pytest-adaptavist 6. Hint for future-me ;) self._eval_format(str(self.config.getini("test_cycle_name")))
test_plan_name = self._eval_format(
str(
self.config.getini("test_plan_name")
if self.config.getini("test_plan_name") != TEST_PLAN_NAME_DEFAULT
else self.config.getini("test_plan_name_deprecated")
)
) # TODO: Remove 'if' in pytest-adaptavist 6
test_run_name = self._eval_format(str(self.config.getini("test_cycle_name")))
test_plan_name = self._eval_format(str(self.config.getini("test_plan_name")))

if self.project_key:
if not self.test_plan_key and self.test_plan_suffix:
Expand Down
8 changes: 4 additions & 4 deletions pytest_adaptavist/metablock.py
Original file line number Diff line number Diff line change
Expand Up @@ -264,14 +264,14 @@ def _process_failed_condition(self, action_on_fail: Action, message_on_fail: str
elif action_on_fail == self.Action.STOP_METHOD:
# STOP_METHOD: skip execution of this block/test, set it to 'Blocked' and continue with next test
self.data["blocked"] = True
pytest.skip(msg=f"Blocked. {self.item_name} failed: {message_on_fail}")
pytest.skip(reason=f"Blocked. {self.item_name} failed: {message_on_fail}")
elif action_on_fail == self.Action.STOP_SESSION:
# STOP_SESSION: skip execution of this block/test, set it to 'Blocked' and block following tests as well
for item in self.items:
item.add_marker("block")
self.adaptavist.test_result_data[fullname]["blocked"] = True
self.adaptavist.test_result_data[fullname]["comment"] = f"Blocked. {self.item_name} failed: {message_on_fail}"
pytest.block(msg=message_on_fail) # type:ignore
pytest.block(reason=message_on_fail) # type:ignore
elif action_on_fail == self.Action.FAIL_SESSION:
# FAIL_SESSION: skip execution of this block/test, set it to 'Fail' and block following tests
for item in self.items:
Expand All @@ -285,10 +285,10 @@ def _process_failed_condition(self, action_on_fail: Action, message_on_fail: str
elif action_on_fail == self.Action.STOP_EXIT_SESSION:
# EXIT_SESSION: skip execution of this block/test, set it to 'Blocked' and exit session
self.item.add_marker("block")
pytest.exit(msg=f"Exiting pytest. {self.item_name} failed: {message_on_fail}", returncode=1)
pytest.exit(reason=f"Exiting pytest. {self.item_name} failed: {message_on_fail}", returncode=1)
elif action_on_fail == self.Action.FAIL_EXIT_SESSION:
# EXIT_SESSION: skip execution of this block/test, set it to 'Blocked' and exit session
pytest.exit(msg=f"Exiting pytest. {self.item_name} failed: {message_on_fail}")
pytest.exit(reason=f"Exiting pytest. {self.item_name} failed: {message_on_fail}")
else:
# CONTINUE: try to collect failed assumption, set result to 'Fail' and continue
pytest.assume(expr=False, msg=message_on_fail) # type:ignore
Expand Down
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
entry_points={"pytest11": ["adaptavist = pytest_adaptavist"]},
platforms="any",
python_requires=">=3.8",
install_requires=["adaptavist>=2.1.0", "pytest>=5.4.0", "pytest-assume>=2.3.2", "pytest-metadata>=1.6.0"],
install_requires=["adaptavist>=2.1.0", "pytest>=7.0.0", "pytest-assume>=2.3.2", "pytest-metadata>=1.6.0"],
extras_require={"test": ["beautifulsoup4", "lxml", "requests"]},
setup_requires=["setuptools_scm"],
keywords="python pytest adaptavist kanoah tm4j jira test testmanagement report",
Expand Down
40 changes: 9 additions & 31 deletions tests/test_pytest_adaptavist.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def test_T123(meta_block):
"""
)
with open("config/global_config.json", "w", encoding="utf8") as file:
file.write('{"test_run_key":"TEST-C1"}')
file.write('{"test_cycle_key":"TEST-C1"}')
hook_record = pytester.inline_run("--adaptavist")
assert hook_record.matchreport().head_line == "test_T123"

Expand Down Expand Up @@ -177,17 +177,17 @@ def test_T124(meta_block):
)
# Test that test cases skipped if append-to-cycle is off and test_case_keys are set
with open("config/global_config.json", "w", encoding="utf8") as file:
file.write('{"project_key": "TEST", "test_run_key":"TEST-C1", "test_case_keys": ["TEST-T123"]}')
file.write('{"project_key": "TEST", "test_cycle_key":"TEST-C1", "test_case_keys": ["TEST-T123"]}')
pytester.runpytest("--adaptavist").assert_outcomes(passed=1, skipped=2)

# Test that test cases which are not defined in test_case_keys are skipped if append-to-cycle is on
with open("config/global_config.json", "w", encoding="utf8") as file:
file.write('{"project_key": "TEST", "test_run_key":"TEST-C1", "test_case_keys": ["TEST-T125"]}')
file.write('{"project_key": "TEST", "test_cycle_key":"TEST-C1", "test_case_keys": ["TEST-T125"]}')
pytester.runpytest("--adaptavist", "--append-to-cycle").assert_outcomes(failed=1, skipped=2)

# Test that test cases run if append-to-cycle is on and test_case_keys are not set
with open("config/global_config.json", "w", encoding="utf8") as file:
file.write('{"project_key": "TEST", "test_run_key":"TEST-C1", "test_case_keys": []}')
file.write('{"project_key": "TEST", "test_cycle_key":"TEST-C1", "test_case_keys": []}')
pytester.runpytest("--adaptavist", "--append-to-cycle").assert_outcomes(passed=2, failed=1)

@pytest.mark.usefixtures("adaptavist_mock")
Expand Down Expand Up @@ -222,11 +222,11 @@ def test_b(meta_block):
)
outcome = pytester.runpytest()
regex = re.findall("\\(not True", str(outcome.outlines).replace("'", "").replace("[", "").replace("]", ""))
assert len(regex) == 2 if running_on_ci() else 1
assert len(regex) == 3 if running_on_ci() else 1
regex = re.findall("\\(False", str(outcome.outlines).replace("'", "").replace("[", "").replace("]", ""))
assert len(regex) == 2 if running_on_ci() else 1
assert len(regex) == 3 if running_on_ci() else 1
regex = re.findall("\\(not not False", str(outcome.outlines).replace("'", "").replace("[", "").replace("]", ""))
assert len(regex) == 2 if running_on_ci() else 1
assert len(regex) == 3 if running_on_ci() else 1

def test_reporting_skipped_test_cases(self, pytester: pytest.Pytester, adaptavist_mock: AdaptavistMock):
"""Don't report a test case if it is not in test_case_keys."""
Expand All @@ -249,28 +249,6 @@ def test_T123(self, meta_block):
assert etrs.call_args_list[0].kwargs["test_case_key"] == "TEST-T123"
assert etrs.call_count == 1

@pytest.mark.filterwarnings("default")
@pytest.mark.usefixtures("adaptavist_mock")
def test_deprecated_options(self, pytester: pytest.Pytester):
"""Test deprecated options."""
pytester.makepyfile(
"""
def test_dummy():
assert True
"""
)
result = pytester.runpytest("--test_run_name=abc", "--adaptavist")
assert any(
"PytestDeprecationWarning: test_run_name is deprecated. Please use --test-cycle-name" in line
for line in result.outlines
)

result = pytester.runpytest("--test_plan_name=abc", "--adaptavist")
assert any(
"PytestDeprecationWarning: test_plan_name is deprecated. Please use --test-plan-name" in line
for line in result.outlines
)

@pytest.mark.usefixtures("adaptavist_mock")
def test_test_run_name(self, pytester: pytest.Pytester):
"""Test that test_run_name template is working."""
Expand Down Expand Up @@ -298,7 +276,7 @@ def test_T123(self, meta_block):
pytester.makeini(
"""
[pytest]
test_run_name = Change test_run_name %(project_key)
test_cycle_name = Change test_run_name %(project_key)
"""
)
pytester.runpytest("--adaptavist")
Expand Down Expand Up @@ -360,7 +338,7 @@ def test_T123(self, meta_block):
pytester.makeini(
"""
[pytest]
test_run_name = Change test_run_name %(project_ey)
test_cycle_name = Change test_run_name %(project_ey)
"""
)
outcome = pytester.runpytest("--adaptavist")
Expand Down
Loading