From 8c7e6b5c465fb689b4b775e1206c8c06ee874166 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 9 Jan 2023 18:38:09 +0100
Subject: [PATCH 1/3] Do not warn about the scheduler's interval key during
 manual optimization

---
 src/pytorch_lightning/core/optimizer.py              | 4 +++-
 .../trainer/optimization/test_manual_optimization.py | 4 +++-
 2 files changed, 6 insertions(+), 2 deletions(-)

diff --git a/src/pytorch_lightning/core/optimizer.py b/src/pytorch_lightning/core/optimizer.py
index 732ddd8b7cc7f..430c92ee62de8 100644
--- a/src/pytorch_lightning/core/optimizer.py
+++ b/src/pytorch_lightning/core/optimizer.py
@@ -322,7 +322,9 @@ def _configure_schedulers_manual_opt(schedulers: list) -> List[LRSchedulerConfig
     lr_scheduler_configs = []
     for scheduler in schedulers:
         if isinstance(scheduler, dict):
-            invalid_keys = {"interval", "frequency", "reduce_on_plateau", "monitor", "strict"}
+            # interval is not in this list even though the user needs to manually call the scheduler because
+            # the `LearningRateMonitor` callback needs to check its value to know when to log the learning rate
+            invalid_keys = {"frequency", "reduce_on_plateau", "monitor", "strict"}
             keys_to_warn = [k for k in scheduler.keys() if k in invalid_keys]
 
             if keys_to_warn:
diff --git a/tests/tests_pytorch/trainer/optimization/test_manual_optimization.py b/tests/tests_pytorch/trainer/optimization/test_manual_optimization.py
index 9c3f85c5ab647..c8e20bb4427a7 100644
--- a/tests/tests_pytorch/trainer/optimization/test_manual_optimization.py
+++ b/tests/tests_pytorch/trainer/optimization/test_manual_optimization.py
@@ -933,6 +933,7 @@ def configure_optimizers(self):
                 scheduler = {
                     "scheduler": torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer),
                     "monitor": "train_loss",
+                    "interval": "step",  # not warned
                 }
             else:
                 scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer)
@@ -946,8 +947,9 @@ def configure_optimizers(self):
     )
 
     if scheduler_as_dict:
-        with pytest.warns(RuntimeWarning, match="but the keys will be ignored"):
+        with pytest.warns(RuntimeWarning, match=r"\['monitor'\], but the keys will be ignored"):
             trainer.fit(model)
+        assert trainer.lr_scheduler_configs[0].interval == "step"
     else:
         trainer.fit(model)
 

From eb3768a69d5c61de4866057fa633a6c257644b03 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Mon, 9 Jan 2023 18:41:28 +0100
Subject: [PATCH 2/3] #

---
 src/pytorch_lightning/CHANGELOG.md | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/src/pytorch_lightning/CHANGELOG.md b/src/pytorch_lightning/CHANGELOG.md
index 888fe0d8d21f9..ef7500745ea6d 100644
--- a/src/pytorch_lightning/CHANGELOG.md
+++ b/src/pytorch_lightning/CHANGELOG.md
@@ -157,6 +157,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed a type error when dividing the chunk size in the ColossalAI strategy ([#16212](https://github.com/Lightning-AI/lightning/pull/16212))
 
+- Fixed a bug where the ``interval`` key of the scheduler would be ignored during manual optimization, making the LearningRateMonitor callback fail to log the learning rate ([#16308](https://github.com/Lightning-AI/lightning/pull/16308))
+
+
 ## [1.8.6] - 2022-12-21
 
 - minor cleaning

From c672d9623af7db103166539f2e2eee9d4a49e000 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Carlos=20Mochol=C3=AD?=
Date: Tue, 10 Jan 2023 14:26:51 +0100
Subject: [PATCH 3/3] Duplicate test

---
 .../trainer/optimization/test_optimizers.py | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/tests/tests_pytorch/trainer/optimization/test_optimizers.py b/tests/tests_pytorch/trainer/optimization/test_optimizers.py
index ed821b0d6ff4c..95ab17e513f7a 100644
--- a/tests/tests_pytorch/trainer/optimization/test_optimizers.py
+++ b/tests/tests_pytorch/trainer/optimization/test_optimizers.py
@@ -557,25 +557,6 @@ def configure_optimizers(self):
     trainer.fit(model)
 
 
-def test_warn_invalid_scheduler_key_in_manual_optimization(tmpdir):
-    """Test warning when invalid scheduler keys are provided in manual optimization."""
-
-    class TestModel(BoringModel):
-        def __init__(self):
-            super().__init__()
-            self.automatic_optimization = False
-
-        def configure_optimizers(self):
-            opt = optim.SGD(self.layer.parameters(), lr=0.1)
-            sch = optim.lr_scheduler.StepLR(opt, step_size=1)
-            return [opt], [{"scheduler": sch, "interval": "epoch"}]
-
-    model = TestModel()
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=True)
-    with pytest.warns(RuntimeWarning, match="the keys will be ignored"):
-        trainer.fit(model)
-
-
 @RunIf(min_cuda_gpus=2, standalone=True)
 def test_optimizer_state_on_device(tmpdir):
     """Test that optimizers that create state initially at instantiation still end up with the state on the GPU."""
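
With this series applied, a LightningModule using manual optimization can keep the "interval" entry in its scheduler dictionary without triggering the "keys will be ignored" warning, and the LearningRateMonitor callback can read that entry to decide when to log the learning rate. The following is a minimal sketch of that usage, not part of the patches: it assumes a recent pytorch_lightning install, and the model name ManualOptModel, the SGD/StepLR choice, and the Trainer arguments are illustrative only.

import torch
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import LearningRateMonitor
from pytorch_lightning.demos.boring_classes import BoringModel


class ManualOptModel(BoringModel):  # hypothetical example model, not taken from the patches
    def __init__(self):
        super().__init__()
        self.automatic_optimization = False  # manual optimization

    def training_step(self, batch, batch_idx):
        opt = self.optimizers()
        opt.zero_grad()
        loss = self(batch).sum()  # toy loss on BoringModel's linear-layer output
        self.manual_backward(loss)
        opt.step()
        self.lr_schedulers().step()  # the user still steps the scheduler manually
        return loss

    def configure_optimizers(self):
        optimizer = torch.optim.SGD(self.parameters(), lr=0.1)
        scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=1)
        # "interval" is kept so LearningRateMonitor knows to log the learning rate every step
        return [optimizer], [{"scheduler": scheduler, "interval": "step"}]


trainer = Trainer(max_steps=2, callbacks=[LearningRateMonitor(logging_interval="step")])
trainer.fit(ManualOptModel())

Before the first patch, the "interval" key in such a dictionary was warned about and ignored, which is what left LearningRateMonitor unable to tell step-level from epoch-level schedulers during manual optimization.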