Skip to content

Commit 6c17059

Browse files
authored
Merge pull request #74 from ngoldbaum/skip-thread-unsafe
2 parents 120db6e + e38e28d commit 6c17059

File tree

3 files changed

+55
-8
lines changed

3 files changed

+55
-8
lines changed

README.md

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -61,9 +61,11 @@ those fixtures are shared between threads.
6161

6262
## Features
6363

64-
- Two global CLI flags:
64+
- Three global CLI flags:
6565
- `--parallel-threads` to run a test suite in parallel
6666
- `--iterations` to run multiple times in each thread
67+
- `--skip-thread-unsafe` to skip running tests that are marked as, or
68+
detected to be, thread-unsafe.
6769

6870
- Three corresponding markers:
6971
- `pytest.mark.parallel_threads(n)` to mark a single test to run
@@ -239,6 +241,13 @@ def test_skip_if_parallel(num_parallel_threads):
239241
...
240242
```
241243

244+
You can skip tests marked as or detected to be thread-unsafe by passing
245+
`--skip-thread-unsafe` in your pytest invocation. This is useful when running
246+
pytest-run-parallel under [Thread
247+
Sanitizer](https://clang.llvm.org/docs/ThreadSanitizer.html). Setting
248+
`--skip-thread-unsafe=True` avoids unnecessarily running tests in which Thread
249+
Sanitizer cannot detect races, since such tests are not parallelized.
250+
242251
Finally, the `thread_comp` fixture allows for parallel test debugging,
243252
by providing an instance of `ThreadComparator`, whose `__call__` method
244253
allows to check if all the values produced by all threads during an

src/pytest_run_parallel/plugin.py

Lines changed: 22 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,14 @@ def pytest_addoption(parser):
3232
type=int,
3333
help="Set the number of threads used to execute each test concurrently.",
3434
)
35+
parser.addoption(
36+
"--skip-thread-unsafe",
37+
action="store",
38+
dest="skip_thread_unsafe",
39+
help="Whether to skip running thread-unsafe tests",
40+
type=bool,
41+
default=False,
42+
)
3543
parser.addini(
3644
"thread_unsafe_fixtures",
3745
"list of thread-unsafe fixture names that cause a test to "
@@ -145,6 +153,8 @@ def pytest_itemcollected(item):
145153
fixtures = getattr(item, "fixturenames", ())
146154

147155
n_iterations = item.config.option.iterations
156+
skip_thread_unsafe = item.config.option.skip_thread_unsafe
157+
148158
m = item.get_closest_marker("iterations")
149159
if m is not None:
150160
n_iterations = int(m.args[0])
@@ -153,13 +163,13 @@ def pytest_itemcollected(item):
153163
if n_workers > 1 and m is not None:
154164
n_workers = 1
155165
reason = m.kwargs.get("reason", None)
156-
if reason is not None:
157-
item.user_properties.append(("thread_unsafe_reason", reason))
166+
if reason is None:
167+
reason = "uses thread_unsafe marker"
168+
item.user_properties.append(("thread_unsafe_reason", reason))
169+
if skip_thread_unsafe:
170+
item.add_marker(pytest.mark.skip(reason=f"Thread unsafe: {reason}"))
158171
else:
159-
item.user_properties.append(
160-
("thread_unsafe_reason", "uses thread_unsafe marker")
161-
)
162-
item.add_marker(pytest.mark.parallel_threads(1))
172+
item.add_marker(pytest.mark.parallel_threads(1))
163173

164174
if not hasattr(item, "obj"):
165175
if hasattr(item, "_parallel_custom_item"):
@@ -190,7 +200,12 @@ def pytest_itemcollected(item):
190200
if thread_unsafe:
191201
n_workers = 1
192202
item.user_properties.append(("thread_unsafe_reason", thread_unsafe_reason))
193-
item.add_marker(pytest.mark.parallel_threads(1))
203+
if skip_thread_unsafe:
204+
item.add_marker(
205+
pytest.mark.skip(reason=f"Thread unsafe: {thread_unsafe_reason}")
206+
)
207+
else:
208+
item.add_marker(pytest.mark.parallel_threads(1))
194209

195210
unsafe_fixtures = _thread_unsafe_fixtures | set(
196211
item.config.getini("thread_unsafe_fixtures")

tests/test_run_parallel.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -589,6 +589,15 @@ def test_should_run_single_2(num_parallel_threads):
589589
]
590590
)
591591

592+
# check that skipping works too
593+
result = pytester.runpytest(
594+
"--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
595+
)
596+
597+
result.stdout.fnmatch_lines(
598+
["*::test_should_run_single SKIPPED*", "*::test_should_run_single_2 SKIPPED*"]
599+
)
600+
592601

593602
def test_pytest_warns_detection(pytester):
594603
# create a temporary pytest test module
@@ -636,6 +645,20 @@ def test_single_thread_warns_4(num_parallel_threads):
636645
]
637646
)
638647

648+
# check that skipping works too
649+
result = pytester.runpytest(
650+
"--parallel-threads=10", "--skip-thread-unsafe=True", "-v"
651+
)
652+
653+
result.stdout.fnmatch_lines(
654+
[
655+
"*::test_single_thread_warns_1 SKIPPED*",
656+
"*::test_single_thread_warns_2 SKIPPED*",
657+
"*::test_single_thread_warns_3 SKIPPED*",
658+
"*::test_single_thread_warns_4 SKIPPED*",
659+
]
660+
)
661+
639662

640663
@pytest.mark.skipif(psutil is None, reason="psutil needs to be installed")
641664
def test_auto_detect_cpus_psutil_affinity(

0 commit comments

Comments
 (0)