4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -226,6 +226,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

- Removed the need to explicitly load habana module ([#13338](https://github.com/PyTorchLightning/pytorch-lightning/pull/13338))

+ - Removed the deprecated `pytorch_lightning.callbacks.lr_monitor.LearningRateMonitor.lr_sch_names` ([#13353](https://github.com/Lightning-AI/lightning/pull/13353))
+
+ - Removed the deprecated `pytorch_lightning.callbacks.lr_monitor.LearningRateMonitor.add_lr_sch_names` ([#13353](https://github.com/Lightning-AI/lightning/pull/13353))
+

### Fixed

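Migration note for users of the removed property: the deprecation message deleted below in `lr_monitor.py` pointed to `LearningRateMonitor.lrs.keys()` as the replacement, since those keys cover every tracked optimizer, with or without a scheduler. A minimal sketch of that usage; `TinyModel` and the random dataset are illustrative only, not taken from this PR:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

import pytorch_lightning as pl
from pytorch_lightning.callbacks import LearningRateMonitor


class TinyModel(pl.LightningModule):
    # Minimal module used only to illustrate the callback API.
    def __init__(self):
        super().__init__()
        self.layer = torch.nn.Linear(4, 1)

    def training_step(self, batch, batch_idx):
        x, y = batch
        return torch.nn.functional.mse_loss(self.layer(x), y)

    def configure_optimizers(self):
        return torch.optim.SGD(self.parameters(), lr=0.1)


data = DataLoader(TensorDataset(torch.randn(8, 4), torch.randn(8, 1)), batch_size=4)
lr_monitor = LearningRateMonitor(logging_interval="epoch")
trainer = pl.Trainer(fast_dev_run=True, callbacks=[lr_monitor])
trainer.fit(TinyModel(), data)

# Deprecated access removed by this PR:
#     lr_monitor.lr_sch_names
# Replacement suggested by the old deprecation message:
names = list(lr_monitor.lrs.keys())  # e.g. ["lr-SGD"]
```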
26 changes: 5 additions & 21 deletions src/pytorch_lightning/callbacks/lr_monitor.py
@@ -94,7 +94,6 @@ def __init__(self, logging_interval: Optional[str] = None, log_momentum: bool =
self.logging_interval = logging_interval
self.log_momentum = log_momentum
self.lrs: Dict[str, List[float]] = {}
- self._lr_sch_names: List[str] = []

def on_train_start(self, trainer: "pl.Trainer", *args: Any, **kwargs: Any) -> None:
"""Called before training, determines unique names for all lr schedulers in the case of multiple of the
@@ -176,7 +175,7 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
scheduler_hparam_keys,
optimizers_with_scheduler,
optimizers_with_scheduler_types,
- ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs, add_lr_sch_names=False)
+ ) = self._find_names_from_schedulers(trainer.lr_scheduler_configs)
self._remap_keys(scheduler_hparam_keys)

for name, config in zip(scheduler_hparam_keys, trainer.lr_scheduler_configs):
@@ -189,7 +188,6 @@ def _extract_stats(self, trainer: "pl.Trainer", interval: str) -> Dict[str, floa
trainer.optimizers,
seen_optimizers=optimizers_with_scheduler,
seen_optimizer_types=optimizers_with_scheduler_types,
- add_lr_sch_names=False,
)
self._remap_keys(optimizer_hparam_keys)

@@ -264,7 +262,8 @@ def _duplicate_param_group_names(self, param_groups: List[Dict]) -> Set[str]:
return {n for n in names if names.count(n) > 1}

def _find_names_from_schedulers(
- self, lr_scheduler_configs: List[LRSchedulerConfig], add_lr_sch_names: bool = True
+ self,
+ lr_scheduler_configs: List[LRSchedulerConfig],
) -> Tuple[List[List[str]], List[Optimizer], DefaultDict[Type[Optimizer], int]]:
# Create unique names in the case we have multiple of the same learning
# rate scheduler + multiple parameter groups
@@ -279,7 +278,7 @@ def _find_names_from_schedulers(
name = "lr-" + sch.optimizer.__class__.__name__

updated_names = self._check_duplicates_and_update_name(
- sch.optimizer, name, seen_optimizers, seen_optimizer_types, config, add_lr_sch_names
+ sch.optimizer, name, seen_optimizers, seen_optimizer_types, config
)
names.append(updated_names)

@@ -290,7 +289,6 @@ def _find_names_from_optimizers(
optimizers: List[Any],
seen_optimizers: List[Optimizer],
seen_optimizer_types: DefaultDict[Type[Optimizer], int],
- add_lr_sch_names: bool = True,
) -> Tuple[List[List[str]], List[Optimizer]]:
names = []
optimizers_without_scheduler = []
@@ -303,7 +301,7 @@ def _find_names_from_optimizers(

name = "lr-" + optimizer.__class__.__name__
updated_names = self._check_duplicates_and_update_name(
- optimizer, name, seen_optimizers, seen_optimizer_types, None, add_lr_sch_names
+ optimizer, name, seen_optimizers, seen_optimizer_types, None
)
names.append(updated_names)
optimizers_without_scheduler.append(optimizer)
@@ -317,7 +315,6 @@ def _check_duplicates_and_update_name(
seen_optimizers: List[Optimizer],
seen_optimizer_types: DefaultDict[Type[Optimizer], int],
lr_scheduler_config: Optional[LRSchedulerConfig],
- add_lr_sch_names: bool = True,
) -> List[str]:
seen_optimizers.append(optimizer)
optimizer_cls = type(optimizer)
@@ -338,17 +335,4 @@ def _check_duplicates_and_update_name(
name = self._add_prefix(name, optimizer_cls, seen_optimizer_types)
name_list = [self._add_suffix(name, param_groups, i) for i in range(len(param_groups))]

- if add_lr_sch_names:
-     self._lr_sch_names.append(name)
-
return name_list
-
- @property
- def lr_sch_names(self) -> List[str]:
-     # TODO remove `lr_sch_names` and `add_lr_sch_names` argument in v1.7.0
-     rank_zero_deprecation(
-         "`LearningRateMonitor.lr_sch_names` has been deprecated in v1.5 and will be removed in 1.7."
-         " Consider accessing them using `LearningRateMonitor.lrs.keys()` which will return"
-         " the names of all the optimizers, even those without a scheduler."
-     )
-     return self._lr_sch_names
10 changes: 0 additions & 10 deletions tests/tests_pytorch/deprecated_api/test_remove_1-7.py
@@ -124,16 +124,6 @@ def test_v1_7_0_deprecated_max_steps_none(tmpdir):
trainer.fit_loop.max_steps = None


- def test_v1_7_0_deprecate_lr_sch_names(tmpdir):
-     model = BoringModel()
-     lr_monitor = LearningRateMonitor()
-     trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, callbacks=[lr_monitor])
-     trainer.fit(model)
-
-     with pytest.deprecated_call(match="`LearningRateMonitor.lr_sch_names` has been deprecated in v1.5"):
-         assert lr_monitor.lr_sch_names == ["lr-SGD"]
-
-
@pytest.mark.parametrize(
"cls",
[
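If an equivalent check is still wanted once the deprecated property is gone, the same behaviour can be asserted through `lrs`. A sketch that mirrors the deleted test above and assumes this file's existing imports (`Trainer`, `LearningRateMonitor`, `BoringModel`); the expected key follows the removed assertion:

```python
def test_lr_monitor_names_via_lrs(tmpdir):
    # Mirrors the removed test: BoringModel configures a single SGD optimizer,
    # so the monitor should track exactly one learning-rate series.
    model = BoringModel()
    lr_monitor = LearningRateMonitor()
    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True, callbacks=[lr_monitor])
    trainer.fit(model)

    assert list(lr_monitor.lrs) == ["lr-SGD"]
```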