File tree Expand file tree Collapse file tree 1 file changed +9
-5
lines changed Expand file tree Collapse file tree 1 file changed +9
-5
lines changed Original file line number Diff line number Diff line change 12
12
* :mod:`torch.fx.experimental.config`
13
13
"""
14
14
15
- import os
16
15
import sys
17
16
from typing import Optional
18
17
18
+ from torch.utils._config_module import Config, install_config_module
19
+
19
20
20
21
__all__ = [
21
22
"job_id" ,
28
29
# FB-internal note: you do NOT have to specify this explicitly if
29
30
# you run on MAST, we will automatically default this to
30
31
# mast:MAST_JOB_NAME:MAST_JOB_VERSION.
31
- job_id: Optional[str] = os.environ.get("TORCH_COMPILE_JOB_ID", None)
32
+ job_id: Config[Optional[str]] = Config(
33
+     env_name_default="TORCH_COMPILE_JOB_ID", default=None
34
+ )
32
35
"""
33
36
Semantically, this should be an identifier that uniquely identifies, e.g., a
34
37
training job. You might have multiple attempts of the same job, e.g., if it was
55
58
consistent profiles across all ranks.
56
59
"""
57
60
58
- cache_key_tag: str = os.environ.get("TORCH_COMPILE_CACHE_KEY_TAG", "")
61
+
62
+ cache_key_tag: Config[str] = Config(
63
+     env_name_default="TORCH_COMPILE_CACHE_KEY_TAG", default=""
64
+ )
59
65
"""
60
66
Tag to be included in the cache key generation for all torch compile caching.
61
67
A common use case for such a tag is to break caches.
62
68
"""
63
69
64
- from torch.utils._config_module import install_config_module
65
-
66
70
67
71
install_config_module(sys.modules[__name__])
You can’t perform that action at this time.
0 commit comments