We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent a30a605 · commit 7c2134b (Copy full SHA for 7c2134b)
vllm/plugins/__init__.py
@@ -3,6 +3,8 @@
3
from contextlib import contextmanager
4
from typing import TYPE_CHECKING, Optional
5
6
+import torch
7
+
8
import vllm.envs as envs
9
10
if TYPE_CHECKING:
@@ -26,7 +28,8 @@ def load_general_plugins():
26
28
27
29
# see https://github.com/vllm-project/vllm/issues/10480
30
os.environ['TORCHINDUCTOR_COMPILE_THREADS'] = '1'
-
31
+ # see https://github.com/vllm-project/vllm/issues/10619
32
+ torch._inductor.config.compile_threads = 1
33
global plugins_loaded
34
if plugins_loaded:
35
return
0 commit comments