Skip to content

Commit d3a4f8d

Browse files
oulgenaditew01
authored and committed
Migrate compiler config to Config (pytorch#143152)
Pull Request resolved: pytorch#143152 Approved by: https://github.com/ezyang ghstack dependencies: pytorch#143150, pytorch#143151
1 parent 85b1001 commit d3a4f8d

File tree

1 file changed

+9
-5
lines changed

1 file changed

+9
-5
lines changed

torch/compiler/config.py

Lines changed: 9 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -12,10 +12,11 @@
1212
* :mod:`torch.fx.experimental.config`
1313
"""
1414

15-
import os
1615
import sys
1716
from typing import Optional
1817

18+
from torch.utils._config_module import Config, install_config_module
19+
1920

2021
__all__ = [
2122
"job_id",
@@ -28,7 +29,9 @@
2829
# FB-internal note: you do NOT have to specify this explicitly if
2930
# you run on MAST, we will automatically default this to
3031
# mast:MAST_JOB_NAME:MAST_JOB_VERSION.
31-
job_id: Optional[str] = os.environ.get("TORCH_COMPILE_JOB_ID", None)
32+
job_id: Config[Optional[str]] = Config(
33+
env_name_default="TORCH_COMPILE_JOB_ID", default=None
34+
)
3235
"""
3336
Semantically, this should be an identifier that uniquely identifies, e.g., a
3437
training job. You might have multiple attempts of the same job, e.g., if it was
@@ -55,13 +58,14 @@
5558
consistent profiles across all ranks.
5659
"""
5760

58-
cache_key_tag: str = os.environ.get("TORCH_COMPILE_CACHE_KEY_TAG", "")
61+
62+
cache_key_tag: Config[str] = Config(
63+
env_name_default="TORCH_COMPILE_CACHE_KEY_TAG", default=""
64+
)
5965
"""
6066
Tag to be included in the cache key generation for all torch compile caching.
6167
A common use case for such a tag is to break caches.
6268
"""
6369

64-
from torch.utils._config_module import install_config_module
65-
6670

6771
install_config_module(sys.modules[__name__])

0 commit comments

Comments
 (0)