I'm using allure-pytest, with stories/markers representing Test Plan requirements. I add a list of markers to the pytest.ini file, and in conftest.py I generate a set of markers that haven't been used. I'd like to add each missing marker as a skipped test to my Allure report. Is there a way to do this?
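For reference, a markers section of the kind I mean looks like this in pytest.ini (the names and descriptions are just examples):

[pytest]
markers =
    smoke: quick smoke tests
    regression: regression suite
    unit: unit tests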
I've tried creating tests in conftest.py:
import pytest
import allure
import allure_commons
from allure_commons.model2 import TestResult, Status, Label, Parameter
from allure_commons.types import AttachmentType
from allure_commons.reporter import AllureReporter
from allure_commons.utils import uuid4

def pytest_collection_finish(session):
    # get all the markers defined in pytest.ini
    marker_dict = session.config.getini('markers')
    markers = set(marker_dict)
    # get all the markers used in the collected tests
    used_markers = set()
    for item in session.items:
        used_markers.update(m.name for m in item.iter_markers())
    # find the markers defined in pytest.ini but not used in any of the tests
    unused_markers = markers - used_markers
    item = session.items[0]
    # add skipped tests for the unused markers
    if unused_markers and item:
        print(item.parent)
        for marker in unused_markers:
            print("adding skipped test: ", marker)
            # pytest.ini entries look like "name: description", so keep only the name
            args = marker.split(":")
            name = args[0]
            add_external_test_result(name, "skipped")  # helper defined elsewhere in my conftest.py
            session.items.append(create_skipped_test(name, marker))  # generates an error for missing config
            #skipped_test = pytest.Function(name=f"test_missing_{name}", parent=None, config=item.config)  # generates an error for deprecated method
            #skipped_test = item.from_parent(parent=item.parent, name=f"test_missing_{name}")
            #skipped_test.add_marker(pytest.mark.skip(reason=f"No test created for marker({marker})"))
            #session.items.append(skipped_test)  # generates an error for the method not existing in the parent
def create_skipped_test(name, marker):
    # create a dummy test function with a skip marker
    @pytest.mark.skip(reason=f"unused marker: {name}")
    @pytest.mark.usefixtures()  # add any necessary fixtures
    def test_dummy():
        pass
    # add the marker to the test function
    test_dummy = pytest.mark.parametrize(name, [marker])(test_dummy)
    return test_dummy
Well, that was pretty tricky, but I think I have achieved the desired behavior using the pytest_generate_tests hook and a pytest_collection_modifyitems hook wrapper.
First of all, you need to add a test_dummy test somewhere in your tests directory, like this:
test_dummy.py
def test_dummy():
    pass
Then, in conftest.py, you should define the following hook functions:
import pytest
from _pytest.mark import Mark

@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_collection_modifyitems(items):
    # get all test_dummy instances (one per mark parameter)
    skip_tests = [item for item in items if item.originalname == "test_dummy"]
    # remove all test_dummy items to prevent their execution without the skip mark
    items[:] = [item for item in items if item.originalname != "test_dummy"]
    # let all other pytest_collection_modifyitems hooks filter items
    yield
    # get the set of all used markers
    used_markers = set()
    for item in items:
        used_markers.update(m.name for m in item.iter_markers())
    for item in skip_tests:
        # get the mark name from the parameter name of the test_dummy instance
        mark = item.name.rsplit("[")[-1][:-1]
        # if the mark name is not in the used markers set:
        if mark not in used_markers:
            # add test_dummy with this mark and a skip mark to the items list
            item.own_markers = [Mark(mark, (), {}), Mark("skip", (), {"reason": f"skip mark: `{mark}`"})]
            items.append(item)

def pytest_generate_tests(metafunc):
    """Generate parametrization for test_dummy using only the custom markers from pytest.ini."""
    if metafunc.definition.name == "test_dummy":
        # get the list of CUSTOM markers from pytest.ini (without the built-in markers of pytest plugins!)
        markers = set(mark.split(":", maxsplit=1)[0] for mark in metafunc.config.getini('markers'))
        # add a "mark" parameter to the function arguments
        if "mark" not in metafunc.fixturenames:
            metafunc.fixturenames.append("mark")
        metafunc.parametrize("mark", [pytest.param(mark) for mark in markers])
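As a side note, the mark name could also be read from the item's callspec instead of being parsed out of item.name. A minimal sketch, assuming every test_dummy instance is a parametrized Function item (the helper name mark_of is hypothetical):

def mark_of(item):
    # callspec.params maps parametrize argnames to their values,
    # which avoids slicing the "[...]" suffix off item.name
    return item.callspec.params["mark"]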
I have added some comments to the code so you can follow its logic.
Basically, it parametrizes test_dummy with every custom marker defined in pytest.ini. Then it stores all test_dummy instances in a separate list and removes them from items.
After all filtering has been done by the other plugins (including pytest's own marker filtering), it builds the set of used markers. For each test_dummy instance whose parameter is not in that set, we add a skip mark plus a mark named after the parameter, and append it to the final items list.
Example:
I have the following markers in pytest.ini: smoke, regression, unit, with one test for each marker. I run pytest -m smoke --alluredir results.
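For reference, the tests behind this example could be as simple as the following (the file and function names are placeholders):

# test_suite.py -- hypothetical layout, one test per marker
import pytest

@pytest.mark.smoke
def test_smoke():
    pass

@pytest.mark.regression
def test_regression():
    pass

@pytest.mark.unit
def test_unit():
    pass

With -m smoke, the regression and unit tests are deselected before the used-markers set is computed, so only smoke counts as used, and test_dummy[regression] and test_dummy[unit] end up in the Allure report as skipped.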
Let me know what you think. Is this what you were trying to achieve?
UPD
Also, you can add an @allure.title to test_dummy to make it look better in the report:
import allure

@allure.title("Skipped mark")
def test_dummy():
    pass