Skip to content

Commit

Permalink
Fix Learning Rate and Loss Handling in Tile Classifier MaskRCNN EfficientNet (#3873)
Browse files Browse the repository at this point in the history

* fixes

* add test case
  • Loading branch information
eugene123tw authored Aug 23, 2024
1 parent 7bdf708 commit 1fced5a
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -149,6 +149,8 @@ def forward_train(
img, img_metas, gt_bboxes, gt_labels, gt_bboxes_ignore, gt_masks, proposals, **kwargs
)
losses.update(rcnn_loss)
if "acc" in losses:
losses.pop("acc")
return losses

@staticmethod
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -122,11 +122,8 @@ def patch_tiling(config, hparams, dataset=None):

if config.model.backbone.type == "efficientnet_b2b":
learning_rate = 0.002
logger.info(
f"Patched {config.model.backbone.type} LR: "
f"{hparams.learning_parameters.learning_rate} -> {learning_rate}"
)
hparams.learning_parameters.learning_rate = learning_rate
logger.info(f"Patched {config.model.backbone.type} LR: " f"{config.optimizer.lr} -> {learning_rate}")
config.optimizer.lr = learning_rate

config.data.train.filter_empty_gt = False

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -211,3 +211,4 @@ def test_patch_tiling_func(self):
hyper_parameters.tiling_parameters.enable_tiling = True
hyper_parameters.tiling_parameters.enable_tile_classifier = True
patch_tiling(cfg, hyper_parameters, self.dataset)
assert cfg.optimizer.lr == 0.002, "Learning rate should be 0.002 when MRCNN EfficientNet is used"

0 comments on commit 1fced5a

Please sign in to comment.