Warn when `self.log(..., logger=True)` is called without a logger (#15814)
carmocca authored Nov 25, 2022
1 parent 4e64391 commit 2c3cc74
Showing 3 changed files with 28 additions and 5 deletions.
2 changes: 2 additions & 0 deletions src/pytorch_lightning/CHANGELOG.md
@@ -27,6 +27,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Added a check to validate that wrapped FSDP models are used while initializing optimizers ([#15301](https://github.com/Lightning-AI/lightning/pull/15301))
 
 
+- Added a warning when `self.log(..., logger=True)` is called without a configured logger ([#15814](https://github.com/Lightning-AI/lightning/pull/15814))
+
 ### Changed
 
 - Drop PyTorch 1.9 support ([#15347](https://github.com/Lightning-AI/lightning/pull/15347))
14 changes: 12 additions & 2 deletions src/pytorch_lightning/core/module.py
@@ -322,7 +322,7 @@ def log(
         name: str,
         value: _METRIC_COLLECTION,
         prog_bar: bool = False,
-        logger: bool = True,
+        logger: Optional[bool] = None,
         on_step: Optional[bool] = None,
         on_epoch: Optional[bool] = None,
         reduce_fx: Union[str, Callable] = "mean",
@@ -438,6 +438,16 @@ def log(
                 "With `def training_step(self, dataloader_iter)`, `self.log(..., batch_size=...)` should be provided."
             )
 
+        if logger and self.trainer.logger is None:
+            rank_zero_warn(
+                f"You called `self.log({name!r}, ..., logger=True)` but have no logger configured. You can enable one"
+                " by doing `Trainer(logger=ALogger(...))`"
+            )
+        if logger is None:
+            # we could set false here if there's no configured logger, however, we still need to compute the "logged"
+            # metrics anyway because that's what the evaluation loops use as return value
+            logger = True
+
         results.log(
             self._current_fx_name,
             name,
@@ -463,7 +473,7 @@ def log_dict(
         self,
         dictionary: Mapping[str, _METRIC_COLLECTION],
         prog_bar: bool = False,
-        logger: bool = True,
+        logger: Optional[bool] = None,
         on_step: Optional[bool] = None,
         on_epoch: Optional[bool] = None,
         reduce_fx: Union[str, Callable] = "mean",
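
For orientation, a minimal sketch of the new behavior from the user's side (the `WarningDemo` module and the `fast_dev_run=1` settings are illustrative, and this assumes a build that includes this patch, with `rank_zero_warn` surfacing as a standard Python warning):

    from pytorch_lightning import Trainer
    from pytorch_lightning.demos.boring_classes import BoringModel


    class WarningDemo(BoringModel):
        def training_step(self, batch, batch_idx):
            # logger=True explicitly requests logging to a logger, so with no
            # logger configured the new warning added in this commit fires.
            self.log("train_loss", 1.0, logger=True)
            return super().training_step(batch, batch_idx)


    # logger=False leaves trainer.logger as None, which trips the new check.
    trainer = Trainer(logger=False, fast_dev_run=1)
    trainer.fit(WarningDemo())
    # Expected warning text, per the diff above:
    # "You called `self.log('train_loss', ..., logger=True)` but have no logger
    # configured. You can enable one by doing `Trainer(logger=ALogger(...))`"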
17 changes: 14 additions & 3 deletions tests/tests_pytorch/trainer/logging_/test_logger_connector.py
@@ -22,6 +22,7 @@
 from pytorch_lightning import LightningModule
 from pytorch_lightning.callbacks.callback import Callback
 from pytorch_lightning.demos.boring_classes import BoringModel, RandomDataset
+from pytorch_lightning.loggers import CSVLogger
 from pytorch_lightning.trainer import Trainer
 from pytorch_lightning.trainer.connectors.logger_connector.fx_validator import _FxValidator
 from pytorch_lightning.trainer.connectors.logger_connector.result import _ResultCollection
@@ -605,15 +606,25 @@ def test_result_collection_on_tensor_with_mean_reduction():
 }
 
 
-def test_logged_metrics_has_logged_epoch_value(tmpdir):
+@pytest.mark.parametrize("logger", (False, True))
+def test_logged_metrics_has_logged_epoch_value(tmpdir, logger):
     class TestModel(BoringModel):
         def training_step(self, batch, batch_idx):
             self.log("epoch", -batch_idx, logger=True)
             return super().training_step(batch, batch_idx)
 
     model = TestModel()
-    trainer = Trainer(default_root_dir=tmpdir, fast_dev_run=2)
-    trainer.fit(model)
+    trainer_kwargs = dict(
+        default_root_dir=tmpdir, limit_train_batches=2, limit_val_batches=0, max_epochs=1, logger=False
+    )
+    if logger:
+        trainer_kwargs["logger"] = CSVLogger(tmpdir)
+    trainer = Trainer(**trainer_kwargs)
+    if not logger:
+        with pytest.warns(match=r"log\('epoch', ..., logger=True\)` but have no logger"):
+            trainer.fit(model)
+    else:
+        trainer.fit(model)
 
     # should not get overridden if logged manually
     assert trainer.logged_metrics == {"epoch": -1}
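
And the remedy the warning suggests, sketched under the same assumptions (CSVLogger mirrors the import added in this test file; any Lightning logger would do, and "logs/" is an arbitrary save directory):

    from pytorch_lightning import Trainer
    from pytorch_lightning.demos.boring_classes import BoringModel
    from pytorch_lightning.loggers import CSVLogger

    # With a logger configured, trainer.logger is no longer None, so
    # `self.log(..., logger=True)` logs to it and no warning is emitted.
    trainer = Trainer(logger=CSVLogger("logs/"), fast_dev_run=1)
    trainer.fit(BoringModel())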
