diff --git a/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py b/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
index 72efd3ad0c..1efacea619 100644
--- a/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
+++ b/py/torch_tensorrt/dynamo/conversion/_ConverterRegistry.py
@@ -519,7 +519,7 @@ def __contains__(self, key: Target | Node) -> bool:
 
     def get_all_converters_with_target(
         self, key: Target, return_registry_info: bool = False
     ) -> Tuple[
-        List[Any], Dict[str, int] | None
+        List[Any], Union[Dict[str, int], None]
     ]:  # TODO: Narrow to ConverterImplSignature this when we can remove FX converters
         """Get all converters across all registries for the target
diff --git a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
index fbd9b00673..eaeb6a8c28 100644
--- a/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
+++ b/py/torch_tensorrt/dynamo/runtime/_MutableTorchTensorRTModule.py
@@ -252,7 +252,7 @@ def forward(a, b, c=0, d=0):
 
         self.refit_state.set_state(RefitFlag.NEEDS_RECOMPILE)
 
-    def _get_total_dynamic_shapes(self) -> dict[str, Any] | None:
+    def _get_total_dynamic_shapes(self) -> Union[dict[str, Any], None]:
         if not self.arg_dynamic_shapes and not self.kwarg_dynamic_shapes:
             return None
         total_dynamic_shape = {}