1 parent a17c027 commit b7a22ba
tests/conftest.py
@@ -21,6 +21,16 @@
import torch.multiprocessing as mp


+@pytest.fixture(scope="function", autouse=True)
+def preserve_global_rank_variable():
+    """ Ensures that the rank_zero_only.rank global variable gets reset in each test. """
+    from pytorch_lightning.utilities.distributed import rank_zero_only
+    rank = getattr(rank_zero_only, "rank", None)
+    yield
+    if rank is not None:
+        setattr(rank_zero_only, "rank", rank)
+
+
@pytest.fixture(scope="function", autouse=True)
def restore_env_variables():
    """ Ensures that environment variables set during the test do not leak out. """