Commit 454c369

feat: add a test for pytest-xdist compatibility
1 parent a752fb9 commit 454c369

File tree

.github/workflows/ci.yml
pyproject.toml
tests/conftest.py
tests/test_pytest_plugin.py

4 files changed, +35 -5 lines changed

.github/workflows/ci.yml

Lines changed: 4 additions & 4 deletions
@@ -49,9 +49,9 @@ jobs:
         name: Install valgrind
         run: sudo apt-get install valgrind -y
       - name: Install dependencies
-        run: pip install .[dev]
-      - if: matrix.config == 'pytest-benchmark'
-        name: Install pytest-benchmark to test compatibility
-        run: pip install pytest-benchmark~=4.0.0
+        run: pip install .[dev,compat]
+      - if: matrix.config != 'pytest-benchmark'
+        name: Uninstall pytest-benchmark
+        run: pip uninstall -y pytest-benchmark
       - name: Run tests
         run: pytest -vs

pyproject.toml

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ dependencies = [
 
 [project.optional-dependencies]
 lint = ["black ~= 23.3.0", "isort ~=5.12.0", "mypy ~= 1.3.0", "ruff ~= 0.0.275"]
-compat = ["pytest-benchmark ~= 4.0.0"]
+compat = ["pytest-benchmark ~= 4.0.0", "pytest-xdist ~= 2.0.0"]
 test = ["pytest ~= 7.0", "pytest-cov ~= 4.0.0"]
 
 [project.entry-points]

tests/conftest.py

Lines changed: 3 additions & 0 deletions
@@ -10,6 +10,9 @@
 skip_without_pytest_benchmark = pytest.mark.skipif(
     not IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark not installed"
 )
+skip_with_pytest_benchmark = pytest.mark.skipif(
+    IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark installed"
+)
 if IS_PYTEST_BENCHMARK_INSTALLED:
     pytest_plugins.append("pytest_benchmark")
     print(
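The hunk adds the new marker but does not show how IS_PYTEST_BENCHMARK_INSTALLED itself is derived. A minimal sketch of one common way such a flag is computed, assuming importlib-based probing; the flag in the real conftest.py may be defined differently:

# Sketch only: the diff does not show how IS_PYTEST_BENCHMARK_INSTALLED is
# computed, so this importlib-based probe is an assumption.
import importlib.util

import pytest

IS_PYTEST_BENCHMARK_INSTALLED = (
    importlib.util.find_spec("pytest_benchmark") is not None
)

# Mirror image of the existing skip_without_pytest_benchmark marker: it skips
# a test when pytest-benchmark *is* present, presumably so the new xdist test
# only exercises the plugin's own benchmark fixture.
skip_with_pytest_benchmark = pytest.mark.skipif(
    IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark installed"
)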

tests/test_pytest_plugin.py

Lines changed: 27 additions & 0 deletions
@@ -5,6 +5,7 @@
 from conftest import (
     IS_PERF_TRAMPOLINE_SUPPORTED,
     skip_with_perf_trampoline,
+    skip_with_pytest_benchmark,
     skip_without_perf_trampoline,
     skip_without_pytest_benchmark,
     skip_without_valgrind,
@@ -319,3 +320,29 @@ def fixtured_child():
         "py::test_some_addition_fixtured.<locals>.fixtured_child" in line
         for line in lines
     ), "No fixtured child test frame found in perf map"
+
+
+@skip_without_valgrind
+@skip_with_pytest_benchmark
+def test_pytest_xdist_concurrency_compatibility(
+    pytester: pytest.Pytester, codspeed_env
+) -> None:
+    pytester.makepyfile(
+        """
+        import time, pytest
+
+        def do_something():
+            time.sleep(1)
+
+        @pytest.mark.parametrize("i", range(256))
+        def test_my_stuff(benchmark, i):
+            benchmark(do_something)
+        """
+    )
+    # Run the test multiple times to reduce the chance of a false positive
+    ITERATIONS = 5
+    for i in range(ITERATIONS):
+        with codspeed_env():
+            result = pytester.runpytest("--codspeed", "-n", "128")
+            assert result.ret == 0, "the run should have succeeded"
+            assert result.stdout.fnmatch_lines(["*256 passed*"])
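The new test spreads 256 parametrized one-second benchmarks across 128 pytest-xdist workers (-n 128) and repeats the whole run five times, so an intermittent concurrency failure is unlikely to slip through as a false positive. It is skipped when pytest-benchmark is installed, presumably so that only the plugin's own benchmark fixture is exercised. The codspeed_env fixture is not part of this diff; a minimal sketch of what such a fixture could look like, assuming the plugin is switched into instrumentation mode by an environment variable such as CODSPEED_ENV (the fixture in the repository's conftest.py may be implemented differently):

# Hypothetical sketch of a codspeed_env-style fixture; the environment
# variable name and mechanism are assumptions, not taken from this commit.
from contextlib import contextmanager

import pytest


@pytest.fixture
def codspeed_env(monkeypatch):
    @contextmanager
    def ctx():
        # Pretend we are running inside the CodSpeed CI environment for the
        # duration of the block, then restore the previous state.
        monkeypatch.setenv("CODSPEED_ENV", "1")
        try:
            yield
        finally:
            monkeypatch.delenv("CODSPEED_ENV", raising=False)

    return ctx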
