
Commit be8ab04

awaelchli authored and rohitgr7 committed
Remove deprecated use_amp attributes (#14832)
* Remove deprecated use_amp attributes
* chlog
1 parent d4c4060 commit be8ab04

5 files changed: +4 −59 lines

src/pytorch_lightning/CHANGELOG.md

Lines changed: 3 additions & 0 deletions
@@ -198,6 +198,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed the deprecated way to set the distributed backend via the environment variable `PL_TORCH_DISTRIBUTED_BACKEND`, in favor of setting the `process_group_backend` in the strategy constructor ([#14693](https://github.com/Lightning-AI/lightning/pull/14693))
 
 
+- Removed the deprecated `Trainer.use_amp` and `LightningModule.use_amp` attributes ([#14832](https://github.com/Lightning-AI/lightning/pull/14832))
+
+
 
 ### Fixed
 
src/pytorch_lightning/core/module.py

Lines changed: 1 addition & 34 deletions
@@ -47,7 +47,7 @@
 from pytorch_lightning.utilities import _IS_WINDOWS, _TORCH_GREATER_EQUAL_1_10, GradClipAlgorithmType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.imports import _TORCH_GREATER_EQUAL_1_11, _TORCH_GREATER_EQUAL_1_13
-from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_deprecation, rank_zero_warn
+from pytorch_lightning.utilities.rank_zero import rank_zero_debug, rank_zero_warn
 from pytorch_lightning.utilities.signature_utils import is_param_in_hook_signature
 from pytorch_lightning.utilities.types import (
     _METRIC_COLLECTION,
@@ -86,7 +86,6 @@ class LightningModule(
         "loggers",
         "automatic_optimization",
         "truncated_bptt_steps",
-        "use_amp",
         "trainer",
         "_running_torchscript",
     ]
@@ -104,8 +103,6 @@ def __init__(self, *args: Any, **kwargs: Any) -> None:
         # pointer to the trainer object
         self._trainer: Optional["pl.Trainer"] = None
 
-        self._use_amp: bool = False
-
         # the precision used
         self.precision: Union[int, str] = 32
 
@@ -1923,36 +1920,6 @@ def to_torchscript(
 
         return torchscript_module
 
-    @property
-    def use_amp(self) -> bool:
-        r"""
-        .. deprecated:: v1.6.
-
-            This property was deprecated in v1.6 and will be removed in v1.8.
-        """
-        if not self._running_torchscript:  # remove with the deprecation removal
-            rank_zero_deprecation(
-                "`LightningModule.use_amp` was deprecated in v1.6 and will be removed in v1.8."
-                " Please use `Trainer.amp_backend`.",
-                stacklevel=5,
-            )
-        return self._use_amp
-
-    @use_amp.setter
-    def use_amp(self, use_amp: bool) -> None:
-        r"""
-        .. deprecated:: v1.6.
-
-            This property was deprecated in v1.6 and will be removed in v1.8.
-        """
-        if not self._running_torchscript:  # remove with the deprecation removal
-            rank_zero_deprecation(
-                "`LightningModule.use_amp` was deprecated in v1.6 and will be removed in v1.8."
-                " Please use `Trainer.amp_backend`.",
-                stacklevel=5,
-            )
-        self._use_amp = use_amp
-
     @contextmanager
     def _prevent_trainer_and_dataloaders_deepcopy(self) -> Generator[None, None, None]:
         self._should_prevent_trainer_and_dataloaders_deepcopy = True
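Inside a `LightningModule`, the removed `self.use_amp` flag has no one-line replacement in this diff, but `self.precision` is still set by default above and still copied from the trainer (see `_copy_trainer_model_properties` below), so hooks can check it instead. A hedged sketch with a hypothetical model class, assuming the standard post-removal API:

import torch
import pytorch_lightning as pl


class MyModel(pl.LightningModule):  # hypothetical example model
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(32, 2)

    def training_step(self, batch, batch_idx):
        # was: `if self.use_amp:` -- attribute removed by this commit
        if self.precision == 16:  # precision is still copied from the Trainer
            pass  # mixed-precision-specific logic would go here
        # alternatively, when a trainer is attached:
        # if self.trainer.amp_backend is not None: ...
        return self.layer(batch).sum()

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)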

src/pytorch_lightning/trainer/connectors/data_connector.py

Lines changed: 0 additions & 2 deletions
@@ -156,8 +156,6 @@ def attach_data(
 
     def _copy_trainer_model_properties(self, model: "pl.LightningModule") -> None:
         model.trainer = proxy(self.trainer)
-        # Remove setting use_amp in v1.8
-        model._use_amp = self.trainer.amp_backend is not None
         model.precision = self.trainer.precision
 
     def attach_dataloaders(

src/pytorch_lightning/trainer/trainer.py

Lines changed: 0 additions & 8 deletions
@@ -2185,14 +2185,6 @@ def log_dir(self) -> Optional[str]:
         dirpath = self.strategy.broadcast(dirpath)
         return dirpath
 
-    @property
-    def use_amp(self) -> bool:
-        rank_zero_deprecation(
-            "`Trainer.use_amp` is deprecated in v1.6.0 and will be removed in v1.8.0."
-            " Please use `Trainer.amp_backend` instead."
-        )
-        return self.precision == 16
-
     @property
     def is_global_zero(self) -> bool:
         return self.strategy.is_global_zero

tests/tests_pytorch/deprecated_api/test_remove_1-8.py

Lines changed: 0 additions & 15 deletions
@@ -490,21 +490,6 @@ def on_load_checkpoint(self, checkpoint):
     _check_datamodule_checkpoint_hooks(trainer)
 
 
-def test_v1_8_0_trainer_use_amp(tmpdir):
-    trainer = Trainer()
-
-    with pytest.deprecated_call(match="`Trainer.use_amp` is deprecated in v1.6.0"):
-        _ = trainer.use_amp
-
-
-def test_v1_8_0_lightning_module_use_amp():
-    model = BoringModel()
-    with pytest.deprecated_call(match="`LightningModule.use_amp` was deprecated in v1.6"):
-        _ = model.use_amp
-    with pytest.deprecated_call(match="`LightningModule.use_amp` was deprecated in v1.6"):
-        model.use_amp = False
-
-
 def test_trainer_config_device_ids():
     trainer = Trainer(devices=2)
     with pytest.deprecated_call(
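The deprecation tests above are deleted rather than replaced. A hypothetical follow-up check (not part of this commit) confirming the attribute is really gone could look like this:

import pytest
from pytorch_lightning import Trainer


def test_trainer_use_amp_removed():
    # after this commit, accessing the removed property raises AttributeError
    with pytest.raises(AttributeError):
        _ = Trainer().use_amp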
