Skip to content

Commit

Permalink
feat: add a test for pytest-xdist compatibility
Browse files Browse the repository at this point in the history
  • Loading branch information
art049 committed Sep 1, 2023
1 parent a752fb9 commit 454c369
Show file tree
Hide file tree
Showing 4 changed files with 35 additions and 5 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ jobs:
name: Install valgrind
run: sudo apt-get install valgrind -y
- name: Install dependencies
run: pip install .[dev]
- if: matrix.config == 'pytest-benchmark'
name: Install pytest-benchmark to test compatibility
run: pip install pytest-benchmark~=4.0.0
run: pip install .[dev,compat]
- if: matrix.config != 'pytest-benchmark'
name: Uninstall pytest-benchmark
run: pip uninstall -y pytest-benchmark
- name: Run tests
run: pytest -vs
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ dependencies = [

[project.optional-dependencies]
lint = ["black ~= 23.3.0", "isort ~=5.12.0", "mypy ~= 1.3.0", "ruff ~= 0.0.275"]
compat = ["pytest-benchmark ~= 4.0.0"]
compat = ["pytest-benchmark ~= 4.0.0", "pytest-xdist ~= 2.0.0"]
test = ["pytest ~= 7.0", "pytest-cov ~= 4.0.0"]

[project.entry-points]
Expand Down
3 changes: 3 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,9 @@
skip_without_pytest_benchmark = pytest.mark.skipif(
not IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark not installed"
)
skip_with_pytest_benchmark = pytest.mark.skipif(
IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark installed"
)
if IS_PYTEST_BENCHMARK_INSTALLED:
pytest_plugins.append("pytest_benchmark")
print(
Expand Down
27 changes: 27 additions & 0 deletions tests/test_pytest_plugin.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
from conftest import (
IS_PERF_TRAMPOLINE_SUPPORTED,
skip_with_perf_trampoline,
skip_with_pytest_benchmark,
skip_without_perf_trampoline,
skip_without_pytest_benchmark,
skip_without_valgrind,
Expand Down Expand Up @@ -319,3 +320,29 @@ def fixtured_child():
"py::test_some_addition_fixtured.<locals>.fixtured_child" in line
for line in lines
), "No fixtured child test frame found in perf map"


@skip_without_valgrind
@skip_with_pytest_benchmark
def test_pytest_xdist_concurrency_compatibility(
    pytester: pytest.Pytester, codspeed_env
) -> None:
    """Run 256 parametrized benchmarks under pytest-xdist with 128 workers.

    Verifies that codspeed instrumentation does not break when many xdist
    workers execute benchmarks concurrently. Skipped when pytest-benchmark
    is installed (its ``benchmark`` fixture would shadow ours) and when
    valgrind is unavailable.
    """
    pytester.makepyfile(
        """
        import time, pytest
        def do_something():
            time.sleep(1)
        @pytest.mark.parametrize("i", range(256))
        def test_my_stuff(benchmark, i):
            benchmark(do_something)
        """
    )
    # Run the test multiple times to reduce the chance of a false positive
    # (a concurrency bug may only surface intermittently).
    ITERATIONS = 5
    for _ in range(ITERATIONS):  # index unused; `_` avoids shadowing the parametrized `i`
        with codspeed_env():
            result = pytester.runpytest("--codspeed", "-n", "128")
            assert result.ret == 0, "the run should have succeeded"
            # fnmatch_lines() raises pytest.fail on mismatch and returns None,
            # so wrapping it in `assert` is wrong — call it directly.
            result.stdout.fnmatch_lines(["*256 passed*"])

0 comments on commit 454c369

Please sign in to comment.