
Commit 111d9c7

Prune deprecated hparams setter (#6207)
1 parent: 40d5a9d
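This removal completes a deprecation announced in v1.1.0: assigning to `self.hparams` on a `LightningModule` no longer works (the property has no setter anymore, so the assignment raises an `AttributeError`), and `self.save_hyperparameters()` is the replacement named in the old warning message. A minimal before/after sketch; the `LitModel` name and its arguments are illustrative, not part of this diff:

    import pytorch_lightning as pl


    class LitModel(pl.LightningModule):
        def __init__(self, learning_rate=0.001, batch_size=32):
            super().__init__()
            # Removed pattern (deprecated in v1.1.0, pruned by this commit):
            #     self.hparams = {"learning_rate": learning_rate, "batch_size": batch_size}
            # Supported replacement: record the __init__ arguments; they stay
            # readable through the (now read-only) `self.hparams` property.
            self.save_hyperparameters()


    model = LitModel()
    assert model.hparams.learning_rate == 0.001
    assert model.hparams.batch_size == 32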

File tree: 5 files changed, +17 -101 lines

- CHANGELOG.md
- pytorch_lightning/core/lightning.py
- tests/deprecated_api/test_remove_1-3.py
- tests/models/test_hparams.py
- tests/trainer/test_trainer_tricks.py

CHANGELOG.md
3 additions, 0 deletions

@@ -43,6 +43,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Removed `mode='auto'` from `EarlyStopping` ([#6167](https://github.com/PyTorchLightning/pytorch-lightning/pull/6167))
 
 
+- Removed deprecated `LightningModule` `hparams` setter ([#6207](https://github.com/PyTorchLightning/pytorch-lightning/pull/6207))
+
+
 ### Fixed
 
 - Made the `Plugin.reduce` method more consistent across all Plugins to reflect a mean-reduction by default ([#6011](https://github.com/PyTorchLightning/pytorch-lightning/pull/6011))

pytorch_lightning/core/lightning.py
0 additions, 34 deletions

@@ -17,7 +17,6 @@
 import copy
 import inspect
 import os
-import re
 import tempfile
 import uuid
 from abc import ABC
@@ -1806,39 +1805,6 @@ def hparams_initial(self) -> AttributeDict:
         # prevent any change
         return copy.deepcopy(self._hparams_initial)
 
-    @hparams.setter
-    def hparams(self, hp: Union[dict, Namespace, Any]):
-        # TODO: remove this method in v1.3.0.
-        rank_zero_warn(
-            "The setter for self.hparams in LightningModule is deprecated since v1.1.0 and will be"
-            " removed in v1.3.0. Replace the assignment `self.hparams = hparams` with "
-            " `self.save_hyperparameters()`.", DeprecationWarning
-        )
-        hparams_assignment_name = self.__get_hparams_assignment_variable()
-        self._hparams_name = hparams_assignment_name
-        self._set_hparams(hp)
-        # this resolves case when user does not uses `save_hyperparameters` and do hard assignement in init
-        if not hasattr(self, "_hparams_initial"):
-            self._hparams_initial = copy.deepcopy(self._hparams)
-
-    def __get_hparams_assignment_variable(self):
-        """
-        looks at the code of the class to figure out what the user named self.hparams
-        this only happens when the user explicitly sets self.hparams
-        """
-        try:
-            class_code = inspect.getsource(self.__class__)
-            lines = class_code.split("\n")
-            for line in lines:
-                line = re.sub(r"\s+", "", line, flags=re.UNICODE)
-                if ".hparams=" in line:
-                    return line.split("=")[1]
-        # todo: specify the possible exception
-        except Exception:
-            return "hparams"
-
-        return None
-
     @property
     def model_size(self) -> float:
         # todo: think about better way without need to dump model to drive
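The deleted `__get_hparams_assignment_variable` helper was the delicate part of the setter: it re-read the class source through `inspect` to guess which variable the user assigned to `self.hparams`. A standalone sketch of that trick, with an illustrative `Example` class and `my_config` name:

    import inspect
    import re

    my_config = {"lr": 0.1}


    class Example:
        def __init__(self):
            self.hparams = my_config


    # Roughly what the deleted helper did: scan the class source for the
    # first `.hparams=` assignment and report its right-hand side.
    source = inspect.getsource(Example)
    for line in source.split("\n"):
        compact = re.sub(r"\s+", "", line, flags=re.UNICODE)
        if ".hparams=" in compact:
            print(compact.split("=")[1])  # prints: my_config
            break

Anything built on `inspect.getsource` fails in a REPL or with dynamically created classes, which helps explain why the source-scanning approach was pruned along with the setter.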

tests/deprecated_api/test_remove_1-3.py
0 additions, 30 deletions (this file was deleted)

tests/models/test_hparams.py
5 additions, 33 deletions

@@ -41,14 +41,6 @@ def __init__(self, hparams):
         self.save_hyperparameters(hparams)
 
 
-class AssignHparamsModel(BoringModel):
-    """ Tests that a model can take an object with explicit setter """
-
-    def __init__(self, hparams):
-        super().__init__()
-        self.hparams = hparams
-
-
 def decorate(func):
 
     @functools.wraps(func)
@@ -68,16 +60,6 @@ def __init__(self, hparams, *my_args, **my_kwargs):
         self.save_hyperparameters(hparams)
 
 
-class AssignHparamsDecoratedModel(BoringModel):
-    """ Tests that a model can take an object with explicit setter"""
-
-    @decorate
-    @decorate
-    def __init__(self, hparams, *my_args, **my_kwargs):
-        super().__init__()
-        self.hparams = hparams
-
-
 # -------------------------
 # STANDARD TESTS
 # -------------------------
@@ -114,7 +96,7 @@ def _run_standard_hparams_test(tmpdir, model, cls, try_overwrite=False):
 
 
 @pytest.mark.parametrize(
-    "cls", [SaveHparamsModel, AssignHparamsModel, SaveHparamsDecoratedModel, AssignHparamsDecoratedModel]
+    "cls", [SaveHparamsModel, SaveHparamsDecoratedModel]
 )
 def test_namespace_hparams(tmpdir, cls):
     # init model
@@ -125,7 +107,7 @@ def test_namespace_hparams(tmpdir, cls):
 
 
 @pytest.mark.parametrize(
-    "cls", [SaveHparamsModel, AssignHparamsModel, SaveHparamsDecoratedModel, AssignHparamsDecoratedModel]
+    "cls", [SaveHparamsModel, SaveHparamsDecoratedModel]
 )
 def test_dict_hparams(tmpdir, cls):
     # init model
@@ -136,7 +118,7 @@ def test_dict_hparams(tmpdir, cls):
 
 
 @pytest.mark.parametrize(
-    "cls", [SaveHparamsModel, AssignHparamsModel, SaveHparamsDecoratedModel, AssignHparamsDecoratedModel]
+    "cls", [SaveHparamsModel, SaveHparamsDecoratedModel]
 )
 def test_omega_conf_hparams(tmpdir, cls):
     # init model
@@ -580,8 +562,7 @@ class SuperClassPositionalArgs(BoringModel):
 
     def __init__(self, hparams):
         super().__init__()
-        self._hparams = None  # pretend BoringModel did not call self.save_hyperparameters()
-        self.hparams = hparams
+        self._hparams = hparams  # pretend BoringModel did not call self.save_hyperparameters()
 
 
 class SubClassVarArgs(SuperClassPositionalArgs):
@@ -617,8 +598,6 @@ def test_init_arg_with_runtime_change(tmpdir, cls):
     assert model.hparams.running_arg == 123
     model.hparams.running_arg = -1
     assert model.hparams.running_arg == -1
-    model.hparams = Namespace(abc=42)
-    assert model.hparams.abc == 42
 
     trainer = Trainer(
         default_root_dir=tmpdir,
@@ -664,18 +643,11 @@ class TestHydraModel(BoringModel):
 
        def __init__(self, args_0, args_1, args_2, kwarg_1=None):
            self.save_hyperparameters()
-           self.test_hparams()
-           config_file = f"{tmpdir}/hparams.yaml"
-           save_hparams_to_yaml(config_file, self.hparams)
-           self.hparams = load_hparams_from_yaml(config_file)
-           self.test_hparams()
-           super().__init__()
-
-       def test_hparams(self):
            assert self.hparams.args_0.log == "Something"
            assert self.hparams.args_1['cfg'].log == "Something"
            assert self.hparams.args_2[0].log == "Something"
            assert self.hparams.kwarg_1['cfg'][0].log == "Something"
+           super().__init__()
 
    with initialize(config_path="conf"):
        args_0 = compose(config_name="config")
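With `AssignHparamsModel` and `AssignHparamsDecoratedModel` gone, only the `save_hyperparameters`-based fixtures stay in the parametrized lists. A minimal sketch of the surviving pattern (the real test models subclass the `BoringModel` helper; a plain `LightningModule` is used here to keep the example self-contained):

    from argparse import Namespace

    from pytorch_lightning import LightningModule


    class SaveHparamsModel(LightningModule):
        """Surviving pattern: hand the container to save_hyperparameters()."""

        def __init__(self, hparams):
            super().__init__()
            self.save_hyperparameters(hparams)


    # Namespace, dict, and OmegaConf containers are all still exercised
    # by the remaining parametrized tests.
    model = SaveHparamsModel(Namespace(my_arg=14))
    assert model.hparams.my_arg == 14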

tests/trainer/test_trainer_tricks.py
9 additions, 4 deletions

@@ -23,6 +23,7 @@
 from pytorch_lightning.utilities import _NATIVE_AMP_AVAILABLE, AMPType
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from tests.base import EvalModelTemplate
+from tests.helpers import BoringModel
 from tests.helpers.datamodules import MNISTDataModule
 
 
@@ -282,10 +283,14 @@ def dataloader(self, *args, **kwargs):
 
 def test_auto_scale_batch_size_duplicate_attribute_warning(tmpdir):
     """ Test for a warning when model.batch_size and model.hparams.batch_size both present. """
-    hparams = EvalModelTemplate.get_default_hparams()
-    model = EvalModelTemplate(**hparams)
-    model.hparams = hparams
-    # now we have model.batch_size and model.hparams.batch_size
+    class TestModel(BoringModel):
+        def __init__(self, batch_size=1):
+            super().__init__()
+            # now we have model.batch_size and model.hparams.batch_size
+            self.batch_size = 1
+            self.save_hyperparameters()
+
+    model = TestModel()
     trainer = Trainer(default_root_dir=tmpdir, max_steps=1, max_epochs=1000, auto_scale_batch_size=True)
     expected_message = "Field `model.batch_size` and `model.hparams.batch_size` are mutually exclusive!"
     with pytest.warns(UserWarning, match=expected_message):