From 454c369e93c1ac12706061e1c0ccf6986b383b59 Mon Sep 17 00:00:00 2001
From: Arthur Pastel
Date: Thu, 31 Aug 2023 17:50:08 +0200
Subject: [PATCH] feat: add a test for pytest-xdist compatibility

---
 .github/workflows/ci.yml    |  8 ++++----
 pyproject.toml              |  2 +-
 tests/conftest.py           |  3 +++
 tests/test_pytest_plugin.py | 27 +++++++++++++++++++++++++++
 4 files changed, 35 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 90e27ff..356b979 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -49,9 +49,9 @@ jobs:
         name: Install valgrind
         run: sudo apt-get install valgrind -y
       - name: Install dependencies
-        run: pip install .[dev]
-      - if: matrix.config == 'pytest-benchmark'
-        name: Install pytest-benchmark to test compatibility
-        run: pip install pytest-benchmark~=4.0.0
+        run: pip install .[dev,compat]
+      - if: matrix.config != 'pytest-benchmark'
+        name: Uninstall pytest-benchmark
+        run: pip uninstall -y pytest-benchmark
       - name: Run tests
         run: pytest -vs
diff --git a/pyproject.toml b/pyproject.toml
index 21aef01..2d3c054 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -42,7 +42,7 @@ dependencies = [
 
 [project.optional-dependencies]
 lint = ["black ~= 23.3.0", "isort ~=5.12.0", "mypy ~= 1.3.0", "ruff ~= 0.0.275"]
-compat = ["pytest-benchmark ~= 4.0.0"]
+compat = ["pytest-benchmark ~= 4.0.0", "pytest-xdist ~= 2.0.0"]
 test = ["pytest ~= 7.0", "pytest-cov ~= 4.0.0"]
 
 [project.entry-points]
diff --git a/tests/conftest.py b/tests/conftest.py
index f58a240..0fbbbc9 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -10,6 +10,9 @@
 skip_without_pytest_benchmark = pytest.mark.skipif(
     not IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark not installed"
 )
+skip_with_pytest_benchmark = pytest.mark.skipif(
+    IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark installed"
+)
 if IS_PYTEST_BENCHMARK_INSTALLED:
     pytest_plugins.append("pytest_benchmark")
     print(
diff --git a/tests/test_pytest_plugin.py b/tests/test_pytest_plugin.py
index 70a6d14..216a087 100644
--- a/tests/test_pytest_plugin.py
+++ b/tests/test_pytest_plugin.py
@@ -5,6 +5,7 @@
 from conftest import (
     IS_PERF_TRAMPOLINE_SUPPORTED,
     skip_with_perf_trampoline,
+    skip_with_pytest_benchmark,
     skip_without_perf_trampoline,
     skip_without_pytest_benchmark,
     skip_without_valgrind,
@@ -319,3 +320,29 @@ def fixtured_child():
         "py::test_some_addition_fixtured.<locals>.fixtured_child" in line
         for line in lines
     ), "No fixtured child test frame found in perf map"
+
+
+@skip_without_valgrind
+@skip_with_pytest_benchmark
+def test_pytest_xdist_concurrency_compatibility(
+    pytester: pytest.Pytester, codspeed_env
+) -> None:
+    pytester.makepyfile(
+        """
+        import time, pytest
+
+        def do_something():
+            time.sleep(1)
+
+        @pytest.mark.parametrize("i", range(256))
+        def test_my_stuff(benchmark, i):
+            benchmark(do_something)
+        """
+    )
+    # Run the test multiple times to reduce the chance of a false positive
+    ITERATIONS = 5
+    for i in range(ITERATIONS):
+        with codspeed_env():
+            result = pytester.runpytest("--codspeed", "-n", "128")
+            assert result.ret == 0, "the run should have succeeded"
+            assert result.stdout.fnmatch_lines(["*256 passed*"])
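
The module that pytester.makepyfile() generates in the new test can also be
exercised by hand. The standalone sketch below assumes the project is
installed with its compat extras (pulling in pytest-xdist) and that
pytest-codspeed supplies the `benchmark` fixture, as the patched test relies
on; the file name test_xdist_compat.py is hypothetical.

    # test_xdist_compat.py -- hypothetical standalone copy of the module the
    # patched test generates via pytester.makepyfile().
    # Run with: pytest test_xdist_compat.py --codspeed -n 128
    import time

    import pytest


    def do_something():
        time.sleep(1)


    # 256 parametrized cases for pytest-xdist to spread across its workers;
    # each case is measured through the `benchmark` fixture.
    @pytest.mark.parametrize("i", range(256))
    def test_my_stuff(benchmark, i):
        benchmark(do_something)

With 128 workers, the 256 one-second cases take roughly two seconds of
wall-clock time per run, which is what keeps the five repeated runs in the
test affordable in CI.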