From e33e55b22effe4778428728c453eb359db5b5b9b Mon Sep 17 00:00:00 2001
From: Evan Li
Date: Thu, 6 Mar 2025 11:38:16 -0800
Subject: [PATCH] fix typing issue in py39

---
 py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py       | 2 +-
 py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py b/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
index 72efd3ad0c..1efacea619 100644
--- a/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
+++ b/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
@@ -519,7 +519,7 @@ def __contains__(self, key: Target | Node) -> bool:
     def get_all_converters_with_target(
         self, key: Target, return_registry_info: bool = False
     ) -> Tuple[
-        List[Any], Dict[str, int] | None
+        Union[List[Any], Dict[str, int], None]
     ]:  # TODO: Narrow to ConverterImplSignature this when we can remove FX converters
         """Get all converters across all registries for the target
diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
index fbd9b00673..eaeb6a8c28 100644
--- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
+++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
@@ -252,7 +252,7 @@ def forward(a, b, c=0, d=0):
 
         self.refit_state.set_state(RefitFlag.NEEDS_RECOMPILE)
 
-    def _get_total_dynamic_shapes(self) -> dict[str, Any] | None:
+    def _get_total_dynamic_shapes(self) -> Union[dict[str, Any], None]:
        if not self.arg_dynamic_shapes and not self.kwarg_dynamic_shapes:
            return None
        total_dynamic_shape = {}
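
The annotations being replaced use PEP 604 union syntax (`X | Y`), which is only valid at runtime from Python 3.10 onward; assuming these modules evaluate their annotations eagerly (i.e. without `from __future__ import annotations`), importing them on Python 3.9 raises a TypeError, which is what switching to `typing.Union` avoids. Below is a minimal sketch of the failure mode and the 3.9-compatible spellings; the function names are hypothetical and for illustration only.

    # Illustration only; not part of the patch. Runs on Python 3.9+.
    from typing import Any, Dict, Optional, Union


    # Works on Python 3.9: spell the union with typing.Union.
    def get_shapes_union(use_default: bool) -> Union[Dict[str, Any], None]:
        return {} if use_default else None


    # Equivalent and slightly more idiomatic for "X or None":
    def get_shapes_optional(use_default: bool) -> Optional[Dict[str, Any]]:
        return {} if use_default else None


    # The pre-patch spelling below raises TypeError at definition time on
    # Python 3.9, because builtin and typing generics do not implement `|`
    # before Python 3.10:
    #
    #     def get_shapes_pep604(use_default: bool) -> dict[str, Any] | None:
    #         ...

Adding `from __future__ import annotations` would also defer evaluation, though such annotations would still fail if later resolved with `typing.get_type_hints` on 3.9, which may be why the patch rewrites the syntax instead.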