diff --git a/CHANGELOG.md b/CHANGELOG.md
index c4128fea0b..80c0ad14a3 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -12,7 +12,6 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 ### Fixed
 
-<<<<<<< HEAD
 - Fixed examples (question answering), where NLTK's `punkt` module needs to be downloaded first. ([#1215](https://github.com/PyTorchLightning/lightning-flash/pull/1215/files))
 - Fixed normalizing inputs to video classification ([#1213](https://github.com/PyTorchLightning/lightning-flash/pull/1213))
 - Fixed a bug where `pretraining_transforms` in the `ImageEmbedder` was never called. ([1196](https://github.com/PyTorchLightning/lightning-flash/pull/1196))
@@ -21,6 +20,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Fixed DDP spawn support for `ObjectDetector`, `InstanceSegmentation`, and `KeypointDetector` ([#1222](https://github.com/PyTorchLightning/lightning-flash/pull/1222))
 - Fixed a bug where `InstanceSegmentation` would fail if samples had an inconsistent number of bboxes, labels, and masks (these will now be treated as negative samples) ([#1222](https://github.com/PyTorchLightning/lightning-flash/pull/1222))
 - Fixed a bug where collate functions were never called in the `ImageEmbedder` class. ([#1217](https://github.com/PyTorchLightning/lightning-flash/pull/1217))
+- Fixed a bug where `ObjectDetector`, `InstanceSegmentation`, and `KeypointDetector` would log train and validation metrics with the same name ([#1252](https://github.com/PyTorchLightning/lightning-flash/pull/1252))
 
 ### Removed
 
diff --git a/flash/core/data/utils.py b/flash/core/data/utils.py
index 69294d4a21..f7b3c51a75 100644
--- a/flash/core/data/utils.py
+++ b/flash/core/data/utils.py
@@ -39,6 +39,7 @@
     RunningStage.VALIDATING: "val",
     RunningStage.PREDICTING: "predict",
     RunningStage.SERVING: "serve",
+    RunningStage.SANITY_CHECKING: "val",
 }
 
 _STAGES_PREFIX_VALUES = {"train", "test", "val", "predict", "serve"}
diff --git a/flash/core/integrations/icevision/adapter.py b/flash/core/integrations/icevision/adapter.py
index bff52bc5e9..332a2fdc8c 100644
--- a/flash/core/integrations/icevision/adapter.py
+++ b/flash/core/integrations/icevision/adapter.py
@@ -40,8 +40,8 @@ class SimpleCOCOMetric(COCOMetric):
     def finalize(self) -> Dict[str, float]:
         logs = super().finalize()
         return {
-            "Precision (IoU=0.50:0.95,area=all)": logs["AP (IoU=0.50:0.95) area=all"],
-            "Recall (IoU=0.50:0.95,area=all,maxDets=100)": logs["AR (IoU=0.50:0.95) area=all maxDets=100"],
+            "precision (IoU=0.50:0.95,area=all)": logs["AP (IoU=0.50:0.95) area=all"],
+            "recall (IoU=0.50:0.95,area=all,maxDets=100)": logs["AR (IoU=0.50:0.95) area=all maxDets=100"],
         }
 
 
diff --git a/flash/core/integrations/icevision/backbones.py b/flash/core/integrations/icevision/backbones.py
index 80d92f1bbe..5280af6a8e 100644
--- a/flash/core/integrations/icevision/backbones.py
+++ b/flash/core/integrations/icevision/backbones.py
@@ -15,6 +15,7 @@
 
 from torch import nn
 
+from flash.core.data.utils import _STAGES_PREFIX
 from flash.core.registry import FlashRegistry
 from flash.core.utilities.imports import _ICEVISION_AVAILABLE
 
@@ -22,17 +23,19 @@
     from icevision.backbones import BackboneConfig
 
 
-def _log_with_prog_bar_override(self, name, value, **kwargs):
+def _log_with_name_and_prog_bar_override(self, name, value, **kwargs):
     if "prog_bar" not in kwargs:
         kwargs["prog_bar"] = True
-    return self._original_log(name.split("/")[-1], value, **kwargs)
+    metric = name.split("/")[-1]
+    metric = f"{_STAGES_PREFIX[self.trainer.state.stage]}_{metric}"
+    return self._original_log(metric, value, **kwargs)
 
 
 def icevision_model_adapter(model_type):
     adapter = model_type.lightning.ModelAdapter
     if not hasattr(adapter, "_original_log"):
         adapter._original_log = adapter.log
-    adapter.log = _log_with_prog_bar_override
+    adapter.log = _log_with_name_and_prog_bar_override
     return adapter
 