2 files changed, +4 −10 lines changed

@@ -21,9 +21,6 @@
 
 if TYPE_CHECKING:
     from vllm.config import ModelConfig, VllmConfig
-else:
-    ModelConfig = None
-    VllmConfig = None
 
 logger = init_logger(__name__)
 
@@ -109,7 +106,7 @@ def log_warnings(cls):
         pass
 
     @classmethod
-    def check_and_update_config(cls, vllm_config: VllmConfig) -> None:
+    def check_and_update_config(cls, vllm_config: "VllmConfig") -> None:
         parallel_config = vllm_config.parallel_config
         scheduler_config = vllm_config.scheduler_config
         compilation_config = vllm_config.compilation_config
@@ -308,7 +305,7 @@ def supports_fp8(cls) -> bool:
         return cls.has_device_capability(89)
 
     @classmethod
-    def supports_v1(cls, model_config: ModelConfig) -> bool:
+    def supports_v1(cls, model_config: "ModelConfig") -> bool:
         return True
 
     @classmethod
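The change above swaps a runtime placeholder (`else: ModelConfig = None`) for string-quoted annotations, so `vllm.config` is imported only while type checking. A minimal runnable sketch of that pattern follows; `ExamplePlatform` is an illustrative stand-in, not a vLLM class, and the script runs even where vLLM is not installed because the `TYPE_CHECKING` import is never executed at runtime.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Followed only by type checkers (mypy/pyright); skipped at runtime, so
    # importing here adds no startup cost and cannot create import cycles.
    from vllm.config import ModelConfig


class ExamplePlatform:  # illustrative stand-in for the platform classes above
    @classmethod
    def supports_v1(cls, model_config: "ModelConfig") -> bool:
        # The quoted annotation is stored as a plain string, so ModelConfig
        # does not need to be bound at runtime and no `ModelConfig = None`
        # fallback is required.
        return True


# Runs in an environment without vllm importable at runtime:
print(ExamplePlatform.supports_v1(model_config=None))  # -> True

Type checkers still resolve the quoted name to `vllm.config.ModelConfig` when checking callers. The second file in the diff, below, receives the identical treatment.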
@@ -13,9 +13,6 @@
 
 if TYPE_CHECKING:
     from vllm.config import ModelConfig, VllmConfig
-else:
-    ModelConfig = None
-    VllmConfig = None
 
 logger = init_logger(__name__)
 
@@ -243,7 +240,7 @@ def is_async_output_supported(cls, enforce_eager: Optional[bool]) -> bool:
         return True
 
     @classmethod
-    def check_and_update_config(cls, vllm_config: VllmConfig) -> None:
+    def check_and_update_config(cls, vllm_config: "VllmConfig") -> None:
         cache_config = vllm_config.cache_config
         if cache_config and cache_config.block_size is None:
             cache_config.block_size = 16
@@ -332,7 +329,7 @@ def fp8_dtype(cls) -> torch.dtype:
             return torch.float8_e4m3fn
 
     @classmethod
-    def supports_v1(cls, model_config: ModelConfig) -> bool:
+    def supports_v1(cls, model_config: "ModelConfig") -> bool:
         # V1 support on AMD gpus is experimental
         return True
 
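For context on the three deleted lines in each file: without quoting, a parameter annotation is evaluated when the `def` statement executes, so the name had to be bound to something at runtime, which is what the old `ModelConfig = None` / `VllmConfig = None` fallback provided. A small sketch of the difference, not vLLM code; the NameError behavior assumes Python versions before deferred annotation evaluation (PEP 649, Python 3.14) and no `from __future__ import annotations`.

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from vllm.config import VllmConfig

try:
    # Unquoted annotation: evaluated while the def statement runs. The
    # TYPE_CHECKING import never executed, so VllmConfig is unbound and this
    # raises NameError -- the failure the deleted `else:` branch papered over.
    def check_and_update_config(vllm_config: VllmConfig) -> None: ...
except NameError as exc:
    print(f"unquoted annotation fails at definition time: {exc}")

# Quoted annotation: kept as the literal string "VllmConfig" and never
# evaluated at runtime, so no placeholder binding is needed.
def check_and_update_config(vllm_config: "VllmConfig") -> None: ...

print(check_and_update_config.__annotations__)
# {'vllm_config': 'VllmConfig', 'return': None}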