diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md
index 0dbf13e4936b8..5488640dacf79 100644
--- a/src/pytorch_lightning/CHANGELOG.md
+++ b/src/pytorch_lightning/CHANGELOG.md
@@ -198,6 +198,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed the deprecated way to set the distributed backend via the environment variable `PL_TORCH_DISTRIBUTED_BACKEND`, in favor of setting the `process_group_backend` in the strategy constructor ([#14693](https://github.com/Lightning-AI/lightning/pull/14693))
 
+- Removed the deprecated `Trainer.use_amp` and `LightningModule.use_amp` attributes ([#14832](https://github.com/Lightning-AI/lightning/pull/14832))
+
+
 ### Fixed
 
diff --git a/src/pytorch_lightning/core/module.py b/src/pytorch_lightning/core/module.py
index 54a8fd64cfc74..ed4868a497816 100644
--- a/src/pytorch_lightning/core/module.py
+++ b/src/pytorch_lightning/core/module.py
@@ -47,7 +47,7 @@
 from pytorch_lightning.utilities import _IS_WINDOWS, _TORCH_GREATER_EQUAL_1_10, GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _TORCH_GREATER_EQUAL_1_11, _TORCH_GREATER_EQUAL_1_13
-from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_warn
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
 from pytorch_lightning.utilities.types import (
     _METRIC_COLLECTION,
@@ -86,7 +86,6 @@ class LightningModule(
         "loggers",
         "automatic_optimization",
         "truncated_bptt_steps",
-        "use_amp",
         "trainer",
         "_running_torchscript",
     ]
@@ -104,8 +103,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         # pointer to the trainer object
         self._trainer: Optional["pl.Trainer"] = None
 
-        self._use_amp: bool = False
-
         # the precision used
         self.precision: Union[int, str] = 32
 
@@ -1923,36 +1920,6 @@ def to_torchscript(
 
         return torchscript_module
 
-    @property
-    def use_amp(self) -> bool:
-        r"""
-        .. deprecated:: v1.6.
-
-            This property was deprecated in v1.6 and will be removed in v1.8.
-        """
-        if not self._running_torchscript:  # remove with the deprecation removal
-            rank_zero_deprecation(
-                "`LightningModule.use_amp` was deprecated in v1.6 and will be removed in v1.8."
-                " Please use `Trainer.amp_backend`.",
-                stacklevel=5,
-            )
-        return self._use_amp
-
-    @use_amp.setter
-    def use_amp(self, use_amp: bool) -> None:
-        r"""
-        .. deprecated:: v1.6.
-
-            This property was deprecated in v1.6 and will be removed in v1.8.
-        """
-        if not self._running_torchscript:  # remove with the deprecation removal
-            rank_zero_deprecation(
-                "`LightningModule.use_amp` was deprecated in v1.6 and will be removed in v1.8."
-                " Please use `Trainer.amp_backend`.",
-                stacklevel=5,
-            )
-        self._use_amp = use_amp
-
     @contextmanager
     def _prevent_trainer_and_dataloaders_deepcopy(self) -> Generator[None, None, None]:
         self._should_prevent_trainer_and_dataloaders_deepcopy = True
diff --git a/src/pytorch_lightning/trainer/connectors/data_connector.py b/src/pytorch_lightning/trainer/connectors/data_connector.py
index 7543172de9450..dce9a4fbaa8a5 100644
--- a/src/pytorch_lightning/trainer/connectors/data_connector.py
+++ b/src/pytorch_lightning/trainer/connectors/data_connector.py
@@ -156,8 +156,6 @@ def attach_data(
 
     def _copy_trainer_model_properties(self, model: "pl.LightningModule") -> None:
         model.trainer = proxy(self.trainer)
-        # Remove setting use_amp in v1.8
-        model._use_amp = self.trainer.amp_backend is not None
         model.precision = self.trainer.precision
 
     def attach_dataloaders(
diff --git a/src/pytorch_lightning/trainer/trainer.py b/src/pytorch_lightning/trainer/trainer.py
index 14659bbd00a70..8188616e968d9 100644
--- a/src/pytorch_lightning/trainer/trainer.py
+++ b/src/pytorch_lightning/trainer/trainer.py
@@ -2185,14 +2185,6 @@ def log_dir(self) -> Optional[str]:
         dirpath = self.strategy.broadcast(dirpath)
         return dirpath
 
-    @property
-    def use_amp(self) -> bool:
-        rank_zero_deprecation(
-            "`Trainer.use_amp` is deprecated in v1.6.0 and will be removed in v1.8.0."
-            " Please use `Trainer.amp_backend` instead."
-        )
-        return self.precision == 16
-
     @property
     def is_global_zero(self) -> bool:
         return self.strategy.is_global_zero
diff --git a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
index f0910d57828c9..3a746b2a0ae9a 100644
--- a/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
+++ b/tests/tests_pytorch/deprecated_api/test_remove_1-8.py
@@ -490,21 +490,6 @@ def on_load_checkpoint(self, checkpoint):
     _check_datamodule_checkpoint_hooks(trainer)
 
 
-def test_v1_8_0_trainer_use_amp(tmpdir):
-    trainer = Trainer()
-
-    with pytest.deprecated_call(match="`Trainer.use_amp` is deprecated in v1.6.0"):
-        _ = trainer.use_amp
-
-
-def test_v1_8_0_lightning_module_use_amp():
-    model = BoringModel()
-    with pytest.deprecated_call(match="`LightningModule.use_amp` was deprecated in v1.6"):
-        _ = model.use_amp
-    with pytest.deprecated_call(match="`LightningModule.use_amp` was deprecated in v1.6"):
-        model.use_amp = False
-
-
 def test_trainer_config_device_ids():
     trainer = Trainer(devices=2)
     with pytest.deprecated_call(