feat: allow running along with pytest-benchmark
art049 committed Nov 10, 2022
1 parent 276679b commit b6ccadb
Showing 4 changed files with 86 additions and 31 deletions.
1 change: 1 addition & 0 deletions pyproject.toml
@@ -46,6 +46,7 @@ dev = [
"pytest-cov ~= 4.0.0",
"ruff ~= 0.0.100",
]
compatibility = ["pytest-benchmark ~= 3.4.1"]

[project.entry-points]
pytest11 = { codspeed = "pytest_codspeed.plugin" }
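
Since the compatibility path now lives behind an optional dependency group, it can be pulled in explicitly, e.g. with something like pip install "pytest-codspeed[compatibility]" or, for development, pip install -e ".[compatibility]" (assuming the distribution is published as pytest-codspeed); without the extra, the plugin still works and the pytest-benchmark integration is simply skipped.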
82 changes: 62 additions & 20 deletions src/pytest_codspeed/plugin.py
@@ -1,15 +1,19 @@
import os
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Callable, List, Optional
import pkgutil
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple

import pytest
from _pytest.fixtures import FixtureManager

from . import __version__
from ._wrapper import get_lib

if TYPE_CHECKING:
from ._wrapper import LibType

IS_PYTEST_BENCHMARK_INSTALLED = pkgutil.find_loader("pytest_benchmark") is not None
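
As an aside, pkgutil.find_loader is deprecated as of Python 3.12; a minimal equivalent of the detection above, assuming only the standard library, would be:

    import importlib.util

    # True when pytest_benchmark is importable, without actually importing it.
    IS_PYTEST_BENCHMARK_INSTALLED = importlib.util.find_spec("pytest_benchmark") is not None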


@pytest.hookimpl(trylast=True)
def pytest_addoption(parser: "pytest.Parser"):
@@ -26,8 +30,9 @@ def pytest_addoption(parser: "pytest.Parser"):
class CodSpeedPlugin:
is_codspeed_enabled: bool
should_measure: bool
lib: Optional["LibType"] = None
benchmark_count: int = 0
lib: Optional["LibType"]
disabled_plugins: Tuple[str, ...]
benchmark_count: int = field(default=0, hash=False, compare=False)


PLUGIN_NAME = "codspeed_plugin"
@@ -37,35 +42,69 @@ def get_plugin(config: "pytest.Config") -> "CodSpeedPlugin":
return config.pluginmanager.get_plugin(PLUGIN_NAME)


@pytest.hookimpl()
@pytest.hookimpl(tryfirst=True)
def pytest_configure(config: "pytest.Config"):
config.addinivalue_line(
"markers", "codspeed_benchmark: mark an entire test for codspeed benchmarking"
)
config.addinivalue_line(
"markers", "benchmark: mark an entire test for codspeed benchmarking"
)
is_codspeed_enabled = (
config.getoption("--codspeed") or os.environ.get("CODSPEED_ENV") is not None
)
should_measure = os.environ.get("CODSPEED_ENV") is not None
lib = get_lib() if should_measure else None
disabled_plugins: List[str] = []
# Disable pytest-benchmark if codspeed is enabled
if is_codspeed_enabled and IS_PYTEST_BENCHMARK_INSTALLED:
object.__setattr__(config.option, "benchmark_disable", True)
config.pluginmanager.set_blocked("pytest-benchmark")
disabled_plugins.append("pytest-benchmark")

plugin = CodSpeedPlugin(
is_codspeed_enabled=config.getoption("--codspeed")
or os.environ.get("CODSPEED_ENV") is not None,
should_measure=os.environ.get("CODSPEED_ENV") is not None,
is_codspeed_enabled=is_codspeed_enabled,
should_measure=should_measure,
lib=lib,
disabled_plugins=tuple(disabled_plugins),
)
if plugin.should_measure:
plugin.lib = get_lib()
config.pluginmanager.register(plugin, PLUGIN_NAME)
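
A hedged sketch (not part of this commit) of how this interaction could be checked with pytester, assuming both pytest-codspeed and pytest-benchmark are installed in the test environment:

    def test_pytest_benchmark_is_disabled(pytester):
        pytester.makepyfile(
            """
            def test_addition(benchmark):
                benchmark(lambda: 1 + 1)
            """
        )
        result = pytester.runpytest("--codspeed")
        # pytest_report_header should announce the plugin that was blocked.
        result.stdout.fnmatch_lines(
            ["*CodSpeed had to disable the following plugins: pytest-benchmark*"]
        )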


def pytest_plugin_registered(plugin, manager: "pytest.PytestPluginManager"):
"""Patch the benchmark fixture to use the codspeed one if codspeed is enabled"""
if IS_PYTEST_BENCHMARK_INSTALLED and isinstance(plugin, FixtureManager):
fixture_manager = plugin
codspeed_plugin: CodSpeedPlugin = manager.get_plugin(PLUGIN_NAME)
if codspeed_plugin.is_codspeed_enabled:
codspeed_benchmark_fixtures = plugin.getfixturedefs(
"codspeed_benchmark", ""
)
assert codspeed_benchmark_fixtures is not None
fixture_manager._arg2fixturedefs[
"__benchmark"
] = fixture_manager._arg2fixturedefs["benchmark"]
fixture_manager._arg2fixturedefs["benchmark"] = list(
codspeed_benchmark_fixtures
)
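
Once this swap has happened, a test requesting the standard pytest-benchmark fixture name transparently receives the CodSpeed one; a hypothetical test module illustrating the effect:

    # Runs against pytest-benchmark in a normal session, and against the
    # CodSpeed fixture when --codspeed is passed.
    def test_sorting(benchmark):
        benchmark(sorted, [5, 3, 1, 4, 2])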


@pytest.hookimpl(trylast=True)
def pytest_report_header(config: "pytest.Config"):
out = [f"codspeed: {__version__}"]
plugin = get_plugin(config)
if plugin.is_codspeed_enabled and not plugin.should_measure:
out.append(
"\033[93m"
"\033[1m"
"NOTICE: codspeed is enabled, but no performance measurement"
" will be made since it's running in an unknown environment."
"\033[0m"
)
if len(plugin.disabled_plugins) > 0:
out.append(
"\033[93mCodSpeed had to disable the following plugins: "
f"{', '.join(plugin.disabled_plugins)}\033[0m"
)
return "\n".join(out)


@@ -101,11 +140,12 @@ def pytest_collection_modifyitems(
items[:] = selected


@pytest.hookimpl()
@pytest.hookimpl(trylast=True)
def pytest_runtest_call(item: "pytest.Item"):
plugin = get_plugin(item.config)

if not plugin.is_codspeed_enabled or not should_benchmark_item(item):
item.runtest()
return # Avoid running the test multiple times when codspeed is disabled
else:
plugin.benchmark_count += 1
if "benchmark" in getattr(item, "fixturenames", []):
@@ -134,7 +174,7 @@ def pytest_sessionfinish(session: "pytest.Session", exitstatus):
)


@pytest.fixture
@pytest.fixture(scope="function")
def codspeed_benchmark(request: "pytest.FixtureRequest") -> Callable:
plugin = get_plugin(request.config)

@@ -152,9 +192,11 @@ def run(func: Callable[..., Any], *args: Any):
return run


@pytest.fixture
def benchmark(codspeed_benchmark):
"""
Compatibility with pytest-benchmark
"""
return codspeed_benchmark
if not IS_PYTEST_BENCHMARK_INSTALLED:

@pytest.fixture(scope="function")
def benchmark(codspeed_benchmark, request: "pytest.FixtureRequest"):
"""
Compatibility with pytest-benchmark
"""
return codspeed_benchmark
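
A short usage sketch: the explicit codspeed_benchmark fixture stays available either way, while the benchmark alias above is only defined when pytest-benchmark is absent (otherwise the fixture swap in pytest_plugin_registered handles the aliasing).

    def test_sum_performance(codspeed_benchmark):
        # Callable like pytest-benchmark's fixture: the function followed by its args.
        codspeed_benchmark(sum, range(1_000))
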
7 changes: 7 additions & 0 deletions tests/conftest.py
@@ -1 +1,8 @@
import pkgutil
import sys

pytest_plugins = ["pytester"]

if pkgutil.find_loader("pytest_benchmark") is not None:
pytest_plugins.append("pytest_benchmark")
print("NOTICE: Testing with pytest-benchmark compatibility", file=sys.stderr)
27 changes: 16 additions & 11 deletions tests/test_pytest_plugin.py
@@ -1,4 +1,3 @@
import os
import shutil
from contextlib import contextmanager

@@ -10,13 +9,17 @@
)


@contextmanager
def codspeed_env():
os.environ["CODSPEED_ENV"] = "1"
try:
yield
finally:
del os.environ["CODSPEED_ENV"]
@pytest.fixture(scope="function")
def codspeed_env(monkeypatch):
@contextmanager
def ctx_manager():
monkeypatch.setenv("CODSPEED_ENV", "1")
try:
yield
finally:
monkeypatch.delenv("CODSPEED_ENV", raising=False)

return ctx_manager
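
A hypothetical usage sketch of the reworked fixture (assuming, as the real tests do via skip_without_valgrind, that the measurement backend is available):

    def test_measurement_under_codspeed_env(pytester, codspeed_env):
        pytester.makepyfile(
            """
            def test_addition_performance(benchmark):
                @benchmark
                def _():
                    return 1 + 1
            """
        )
        # CODSPEED_ENV is only set while the context manager is active, and
        # monkeypatch guarantees cleanup even if the assertion fails.
        with codspeed_env():
            result = pytester.runpytest()
        result.assert_outcomes(passed=1)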


def test_plugin_enabled_without_env(pytester: pytest.Pytester) -> None:
@@ -42,7 +45,7 @@ def _():


@skip_without_valgrind
def test_plugin_enabled_by_env(pytester: pytest.Pytester) -> None:
def test_plugin_enabled_by_env(pytester: pytest.Pytester, codspeed_env) -> None:
pytester.makepyfile(
"""
def test_some_addition_performance(benchmark):
@@ -57,7 +60,7 @@ def _():


@skip_without_valgrind
def test_plugin_enabled_and_env(pytester: pytest.Pytester) -> None:
def test_plugin_enabled_and_env(pytester: pytest.Pytester, codspeed_env) -> None:
pytester.makepyfile(
"""
def test_some_addition_performance(benchmark):
@@ -85,7 +88,9 @@ def _():


@skip_without_valgrind
def test_plugin_enabled_nothing_to_benchmark(pytester: pytest.Pytester) -> None:
def test_plugin_enabled_nothing_to_benchmark(
pytester: pytest.Pytester, codspeed_env
) -> None:
pytester.makepyfile(
"""
def test_some_addition_performance():
