diff --git a/.github/workflows/code-checks.yml b/.github/workflows/code-checks.yml
index e99863dc794d4..8cd4206ab61b7 100644
--- a/.github/workflows/code-checks.yml
+++ b/.github/workflows/code-checks.yml
@@ -14,8 +14,8 @@ jobs:
       - uses: actions/setup-python@v2
         with:
           python-version: 3.9
-      - name: Install mypy
+      - name: Install dependencies
         run: |
-          grep mypy requirements/test.txt | xargs -0 pip install
+          pip install '.[dev]'
           pip list
       - run: mypy --install-types --non-interactive
diff --git a/pyproject.toml b/pyproject.toml
index c527ffaa856cf..e3c373aee5aeb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -36,41 +36,31 @@ disable_error_code = "attr-defined"
 # style choices
 warn_no_return = "False"
 
-# TODO: Fix typing for these modules
+# Changes mypy default to ignore all errors
 [[tool.mypy.overrides]]
 module = [
-    "pytorch_lightning.callbacks.*",
-    "pytorch_lightning.core.*",
-    "pytorch_lightning.loggers.*",
-    "pytorch_lightning.loops.*",
-    "pytorch_lightning.overrides.*",
-    "pytorch_lightning.plugins.environments.*",
-    "pytorch_lightning.plugins.training_type.*",
-    "pytorch_lightning.profiler.*",
-    "pytorch_lightning.trainer.*",
-    "pytorch_lightning.distributed.*",
-    "pytorch_lightning.tuner.*",
-    "pytorch_lightning.utilities.*",
+    "pytorch_lightning.*",
 ]
 ignore_errors = "True"
 
+# Override the default for files where we would like to enable type checking
+# TODO: Bring more files into this section
 [[tool.mypy.overrides]]
 module = [
     "pytorch_lightning.callbacks.device_stats_monitor",
     "pytorch_lightning.callbacks.early_stopping",
     "pytorch_lightning.callbacks.gpu_stats_monitor",
     "pytorch_lightning.callbacks.gradient_accumulation_scheduler",
-    "pytorch_lightning.callbacks.lr_monitor",
     "pytorch_lightning.callbacks.model_summary",
     "pytorch_lightning.callbacks.progress",
     "pytorch_lightning.callbacks.pruning",
     "pytorch_lightning.callbacks.rich_model_summary",
     "pytorch_lightning.core.optimizer",
-    "pytorch_lightning.lite.*",
-    "pytorch_lightning.loops.optimization.*",
+    "pytorch_lightning.loops.optimization.closure",
+    "pytorch_lightning.loops.optimization.manual_loop",
     "pytorch_lightning.loops.evaluation_loop",
-    "pytorch_lightning.trainer.connectors.checkpoint_connector",
-    "pytorch_lightning.trainer.connectors.logger_connector.*",
+    "pytorch_lightning.trainer.connectors.logger_connector",
+    "pytorch_lightning.trainer.connectors.logger_connector.fx_validator",
     "pytorch_lightning.trainer.connectors.signal_connector",
     "pytorch_lightning.trainer.progress.*",
     "pytorch_lightning.tuner.auto_gpu_select",
@@ -80,8 +70,6 @@ module = [
     "pytorch_lightning.utilities.cloud_io",
     "pytorch_lightning.utilities.device_dtype_mixin",
     "pytorch_lightning.utilities.device_parser",
-    "pytorch_lightning.utilities.distributed",
-    "pytorch_lightning.utilities.memory",
     "pytorch_lightning.utilities.model_summary",
     "pytorch_lightning.utilities.parameter_tying",
     "pytorch_lightning.utilities.parsing",
diff --git a/pytorch_lightning/trainer/connectors/logger_connector/result.py b/pytorch_lightning/trainer/connectors/logger_connector/result.py
index ab3c0f1804c2a..e10360a5fb564 100644
--- a/pytorch_lightning/trainer/connectors/logger_connector/result.py
+++ b/pytorch_lightning/trainer/connectors/logger_connector/result.py
@@ -280,8 +280,8 @@ def wrapped_func(*args: Any, **kwargs: Any) -> Optional[Any]:
             )
 
         # return cached value
-        if self._computed is not None:  # type: ignore
-            return self._computed  # type: ignore
+        if self._computed is not None:
+            return self._computed
         self._computed = compute(*args, **kwargs)
         return self._computed
 
diff --git a/pytorch_lightning/utilities/parameter_tying.py b/pytorch_lightning/utilities/parameter_tying.py
index 7a074deec9d1d..8278c6510cf4a 100644
--- a/pytorch_lightning/utilities/parameter_tying.py
+++ b/pytorch_lightning/utilities/parameter_tying.py
@@ -19,7 +19,6 @@
 from typing import Dict, List, Optional
 
 from torch import nn
-from torch.nn import Parameter
 
 
 def find_shared_parameters(module: nn.Module) -> List[str]:
@@ -64,7 +63,7 @@ def _get_module_by_path(module: nn.Module, path: str) -> nn.Module:
     return module
 
 
-def _set_module_by_path(module: nn.Module, path: str, value: Parameter) -> None:
+def _set_module_by_path(module: nn.Module, path: str, value: nn.Module) -> None:
     path = path.split(".")
     for name in path[:-1]:
         module = getattr(module, name)
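
Note on the workflow change: pip install '.[dev]' only resolves if the package declares a "dev" extra. A minimal sketch of such a declaration, assuming a PEP 621 [project] table (the project may instead define its extras in setup.py, and the dependency list here is invented):

    [project.optional-dependencies]
    # Hypothetical "dev" extra; the real list lives in the project's
    # packaging metadata and is not shown in this diff.
    dev = ["mypy"]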
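Note on the pyproject.toml restructuring: the first [[tool.mypy.overrides]] block now ignores errors for everything under pytorch_lightning.*, while the second block opts individual modules back in. This works because mypy gives sections naming concrete modules precedence over sections with wildcard patterns. A minimal self-contained illustration, with invented package names:

    [tool.mypy]
    files = ["mypkg"]

    # Wildcard pattern: ignore errors everywhere by default.
    [[tool.mypy.overrides]]
    module = ["mypkg.*"]
    ignore_errors = true

    # Concrete module name: takes precedence over the wildcard above,
    # so errors in mypkg.core are reported again.
    [[tool.mypy.overrides]]
    module = ["mypkg.core"]
    ignore_errors = false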
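Note on the result.py change: the two "# type: ignore" comments can be dropped because mypy narrows an Optional attribute after an "is not None" check. A minimal sketch of the cache-and-narrow pattern, with invented names (the real class caches a metric computation):

    from typing import Optional

    class Cached:
        def __init__(self) -> None:
            self._computed: Optional[int] = None

        def _compute(self) -> int:
            return 42  # stand-in for the expensive computation

        def value(self) -> int:
            # mypy narrows Optional[int] to int inside this branch,
            # so no "type: ignore" is needed on the return.
            if self._computed is not None:
                return self._computed
            self._computed = self._compute()
            return self._computed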
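Note on the parameter_tying.py change: annotating value as nn.Module matches the return type of _get_module_by_path, since both helpers walk the same dotted attribute path. A hedged usage sketch of the pair (toy model invented; these are private helpers, shown only to illustrate the path walk):

    from torch import nn
    from pytorch_lightning.utilities.parameter_tying import (
        _get_module_by_path,
        _set_module_by_path,
    )

    class Autoencoder(nn.Module):
        def __init__(self) -> None:
            super().__init__()
            self.encoder = nn.Linear(4, 4)
            self.decoder = nn.Linear(4, 4)

    model = Autoencoder()
    # Rebind "decoder" to the same object as "encoder" so both attribute
    # paths share one set of weights.
    _set_module_by_path(model, "decoder", _get_module_by_path(model, "encoder"))
    assert model.encoder is model.decoder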