twister: pytest: Add --pytest-args to Twister command line
Extend the Twister command line with --pytest-args. This parameter
is passed to the pytest subprocess. It allows selecting a specific
test case from a test suite.

Signed-off-by: Grzegorz Chwierut <[email protected]>
gchwier authored and carlescufi committed Nov 23, 2023
1 parent 8495595 commit a1698b6
Showing 4 changed files with 96 additions and 4 deletions.
10 changes: 10 additions & 0 deletions doc/develop/test/pytest.rst
@@ -56,6 +56,16 @@ Pytest scans the given locations looking for tests, following its default
`discovery rules <https://docs.pytest.org/en/7.1.x/explanation/goodpractices.html#conventions-for-python-test-discovery>`_
One can also pass extra arguments to pytest from the YAML file using the ``pytest_args`` keyword
under ``harness_config``, e.g.: ``pytest_args: ['-k=test_method', '--log-level=DEBUG']``.
There is also an option to pass ``--pytest-args`` through the Twister command line. This can be
particularly useful when one wants to select a specific test case from a test suite. For instance,
one can use a command:

.. code-block:: console

   $ ./scripts/twister --platform native_sim -T samples/subsys/testsuite/pytest/shell \
       -s samples/subsys/testsuite/pytest/shell/sample.pytest.shell \
       --pytest-args='-k test_shell_print_version'
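
For reference, the equivalent arguments can also be kept statically in the sample's
``testcase.yaml`` under ``harness_config``. The fragment below is only an illustrative
sketch; the test scenario name and the pytest expression are hypothetical and would need
to match the actual sample:

.. code-block:: yaml

   # Hypothetical testcase.yaml fragment; the scenario name and -k expression
   # are illustrative only.
   tests:
     sample.pytest.shell:
       harness: pytest
       harness_config:
         pytest_root:
           - "pytest"
         pytest_args: ['-k test_shell_print_version', '--log-level=DEBUG']

Note that ``--pytest-args`` given on the Twister command line takes precedence over
``pytest_args`` defined this way in the YAML file.
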
Helpers & fixtures
==================
5 changes: 5 additions & 0 deletions scripts/pylib/twister/twisterlib/environment.py
@@ -216,6 +216,11 @@ def add_parse_arguments(parser = None):
and 'fifo_loop' is a name of a function found in main.c without test prefix.
""")

parser.add_argument("--pytest-args",
help="""Pass additional arguments to the pytest subprocess. This parameter
will override the pytest_args from the harness_config in the YAML file.
""")

valgrind_asan_group.add_argument(
"--enable-valgrind", action="store_true",
help="""Run binary through valgrind and check for several memory access
19 changes: 15 additions & 4 deletions scripts/pylib/twister/twisterlib/harness.py
@@ -309,8 +309,9 @@ def pytest_run(self, timeout):

def generate_command(self):
config = self.instance.testsuite.harness_config
handler: Handler = self.instance.handler
pytest_root = config.get('pytest_root', ['pytest']) if config else ['pytest']
pytest_args = config.get('pytest_args', []) if config else []
pytest_args_yaml = config.get('pytest_args', []) if config else []
pytest_dut_scope = config.get('pytest_dut_scope', None) if config else None
command = [
'pytest',
@@ -324,12 +325,19 @@
]
command.extend([os.path.normpath(os.path.join(
self.source_dir, os.path.expanduser(os.path.expandvars(src)))) for src in pytest_root])
command.extend(pytest_args)

if handler.options.pytest_args:
command.append(handler.options.pytest_args)
if pytest_args_yaml:
logger.warning(f'The pytest_args ({handler.options.pytest_args}) specified '
'in the command line will override the pytest_args defined '
f'in the YAML file {pytest_args_yaml}')
else:
command.extend(pytest_args_yaml)

if pytest_dut_scope:
command.append(f'--dut-scope={pytest_dut_scope}')

handler: Handler = self.instance.handler

if handler.options.verbose > 1:
command.extend([
'--log-cli-level=DEBUG',
@@ -489,6 +497,9 @@ def _parse_report_file(self, report):
tc.status = 'error'
tc.reason = elem.get('message')
tc.output = elem.text
else:
self.state = 'skipped'
self.instance.reason = 'No tests collected'


class Gtest(Harness):
66 changes: 66 additions & 0 deletions scripts/tests/twister/pytest_integration/test_harness_pytest.py
@@ -25,6 +25,7 @@ def testinstance() -> TestInstance:
testinstance.handler = mock.Mock()
testinstance.handler.options = mock.Mock()
testinstance.handler.options.verbose = 1
testinstance.handler.options.pytest_args = None
testinstance.handler.type_str = 'native'
return testinstance

@@ -67,6 +68,18 @@ def test_pytest_command_extra_args(testinstance: TestInstance):
assert c in command


def test_pytest_command_extra_args_in_options(testinstance: TestInstance):
pytest_harness = Pytest()
pytest_args_from_yaml = '-k test_from_yaml'
pytest_args_from_cmd = '-k test_from_cmd'
testinstance.testsuite.harness_config['pytest_args'] = [pytest_args_from_yaml]
testinstance.handler.options.pytest_args = pytest_args_from_cmd
pytest_harness.configure(testinstance)
command = pytest_harness.generate_command()
assert pytest_args_from_cmd in command
assert pytest_args_from_yaml not in command


@pytest.mark.parametrize(
('pytest_root', 'expected'),
[
@@ -222,3 +235,56 @@ def test_skip_2():
assert len(testinstance.testcases) == 2
for tc in testinstance.testcases:
assert tc.status == "skipped"


def test_if_report_with_filter(pytester, testinstance: TestInstance):
test_file_content = textwrap.dedent("""
import pytest
def test_A():
pass
def test_B():
pass
""")
test_file = pytester.path / 'test_filter.py'
test_file.write_text(test_file_content)
report_file = pytester.path / 'report.xml'
result = pytester.runpytest(
str(test_file),
'-k', 'test_B',
f'--junit-xml={str(report_file)}'
)
result.assert_outcomes(passed=1)
assert report_file.is_file()

pytest_harness = Pytest()
pytest_harness.configure(testinstance)
pytest_harness.report_file = report_file
pytest_harness._update_test_status()
assert pytest_harness.state == "passed"
assert testinstance.status == "passed"
assert len(testinstance.testcases) == 1


def test_if_report_with_no_collected(pytester, testinstance: TestInstance):
test_file_content = textwrap.dedent("""
import pytest
def test_A():
pass
""")
test_file = pytester.path / 'test_filter.py'
test_file.write_text(test_file_content)
report_file = pytester.path / 'report.xml'
result = pytester.runpytest(
str(test_file),
'-k', 'test_B',
f'--junit-xml={str(report_file)}'
)
result.assert_outcomes(passed=0)
assert report_file.is_file()

pytest_harness = Pytest()
pytest_harness.configure(testinstance)
pytest_harness.report_file = report_file
pytest_harness._update_test_status()
assert pytest_harness.state == "skipped"
assert testinstance.status == "skipped"
