From 84709bb0bccd08bc2247fe1f557c915cdd10a663 Mon Sep 17 00:00:00 2001
From: Al3xDo
Date: Tue, 13 Dec 2022 20:54:20 +0700
Subject: [PATCH] adding hint to the logger's error messages

---
 .../logger_connector/fx_validator.py          |  2 +-
 .../trainer/logging_/test_logger_connector.py | 56 +++++++++----------
 2 files changed, 29 insertions(+), 29 deletions(-)

diff --git a/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py b/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
index f1478ecbf9cbe5..cd0c425f54e6f3 100644
--- a/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
+++ b/src/pytorch_lightning/trainer/connectors/logger_connector/fx_validator.py
@@ -174,7 +174,7 @@ def check_logging(cls, fx_name: str) -> None:
             )
 
         if cls.functions[fx_name] is None:
-            raise MisconfigurationException(f"You can't `self.log()` inside `{fx_name}`.")
+            raise MisconfigurationException(f"You can't `self.log()` inside `{fx_name}`. You can alternatively log with `logger.experiment`.")
 
     @classmethod
     def get_default_logging_levels(
diff --git a/tests/tests_pytorch/trainer/logging_/test_logger_connector.py b/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
index 1ffe7ffe9defb6..bf11418b00861b 100644
--- a/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
+++ b/tests/tests_pytorch/trainer/logging_/test_logger_connector.py
@@ -133,7 +133,7 @@ def test_fx_validator():
                 validator.check_logging_levels(fx_name=func_name, on_step=True, on_epoch=on_epoch)
         else:
             assert func_name in not_supported
-            with pytest.raises(MisconfigurationException, match="You can't"):
+            with pytest.raises(MisconfigurationException, match="You can alternatively log with"):
                 validator.check_logging(fx_name=func_name)
 
     with pytest.raises(RuntimeError, match="Logging inside `foo` is not implemented"):
@@ -188,29 +188,29 @@ def test_fx_validator_integration(tmpdir):
     """Tries to log inside all `LightningModule` and `Callback` hooks to check any expected errors."""
     not_supported = {
         None: "`self.trainer` reference is not registered",
-        "setup": "You can't",
-        "configure_sharded_model": "You can't",
-        "configure_optimizers": "You can't",
-        "on_fit_start": "You can't",
-        "train_dataloader": "You can't",
-        "val_dataloader": "You can't",
-        "on_before_batch_transfer": "You can't",
-        "transfer_batch_to_device": "You can't",
-        "on_after_batch_transfer": "You can't",
-        "on_validation_end": "You can't",
-        "on_train_end": "You can't",
-        "on_fit_end": "You can't",
-        "teardown": "You can't",
-        "on_sanity_check_start": "You can't",
-        "on_sanity_check_end": "You can't",
-        "prepare_data": "You can't",
-        "configure_callbacks": "You can't",
-        "on_validation_model_eval": "You can't",
-        "on_validation_model_train": "You can't",
-        "lr_scheduler_step": "You can't",
-        "on_save_checkpoint": "You can't",
-        "on_load_checkpoint": "You can't",
-        "on_exception": "You can't",
+        "setup": "You can alternatively log with",
+        "configure_sharded_model": "You can alternatively log with",
+        "configure_optimizers": "You can alternatively log with",
+        "on_fit_start": "You can alternatively log with",
+        "train_dataloader": "You can alternatively log with",
+        "val_dataloader": "You can alternatively log with",
+        "on_before_batch_transfer": "You can alternatively log with",
+        "transfer_batch_to_device": "You can alternatively log with",
+        "on_after_batch_transfer": "You can alternatively log with",
+        "on_validation_end": "You can alternatively log with",
+        "on_train_end": "You can alternatively log with",
+        "on_fit_end": "You can alternatively log with",
+        "teardown": "You can alternatively log with",
+        "on_sanity_check_start": "You can alternatively log with",
+        "on_sanity_check_end": "You can alternatively log with",
+        "prepare_data": "You can alternatively log with",
+        "configure_callbacks": "You can alternatively log with",
+        "on_validation_model_eval": "You can alternatively log with",
+        "on_validation_model_train": "You can alternatively log with",
+        "lr_scheduler_step": "You can alternatively log with",
+        "on_save_checkpoint": "You can alternatively log with",
+        "on_load_checkpoint": "You can alternatively log with",
+        "on_exception": "You can alternatively log with",
     }
     model = HookedModel(not_supported)
 
@@ -232,10 +232,10 @@
     not_supported.update(
         {
             # `lightning_module` ref is now present from the `fit` call
-            "test_dataloader": "You can't",
-            "on_test_model_eval": "You can't",
-            "on_test_model_train": "You can't",
-            "on_test_end": "You can't",
+            "test_dataloader": "You can alternatively log with",
+            "on_test_model_eval": "You can alternatively log with",
+            "on_test_model_train": "You can alternatively log with",
+            "on_test_end": "You can alternatively log with",
+        }
     )
     trainer.test(model, verbose=False)