From 3b828eca80acdadea72bb69342942a96f10411c5 Mon Sep 17 00:00:00 2001
From: alekseysh
Date: Fri, 5 Jul 2024 11:04:18 +0600
Subject: [PATCH] upd

---
 oml/interfaces/models.py      | 2 +-
 oml/models/meta/projection.py | 4 ++--
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/oml/interfaces/models.py b/oml/interfaces/models.py
index 91ef492e..6c080ebe 100644
--- a/oml/interfaces/models.py
+++ b/oml/interfaces/models.py
@@ -22,7 +22,7 @@ def feat_dim(self) -> int:
         raise NotImplementedError()

     @classmethod
-    def from_pretrained(cls, weights: str, **kwargs: Dict[str, Any]) -> "IExtractor":
+    def from_pretrained(cls, weights: str, **kwargs) -> "IExtractor":  # type: ignore
         """
         This method allows to download a pretrained checkpoint.
         The class field ``self.pretrained_models`` is the dictionary which keeps records of all the available
diff --git a/oml/models/meta/projection.py b/oml/models/meta/projection.py
index ad954acb..703f3c79 100644
--- a/oml/models/meta/projection.py
+++ b/oml/models/meta/projection.py
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import List, Optional, Union

 import torch
 from torchvision.ops import MLP
@@ -88,7 +88,7 @@ def unfreeze(self) -> None:
         self.train_backbone = True

     @classmethod
-    def from_pretrained(cls, weights: str, **kwargs: Dict[str, Any]) -> "IExtractor":
+    def from_pretrained(cls, weights: str, **kwargs) -> "IExtractor":  # type: ignore
         # The current class takes another model as a constructor's argument, so, they need to be
         # in the `self.pretrained_models`. The problem is these models will be instantiated even if we simply
         # import something from the current module. To avoid it we added the logic of wrapping/unwrapping
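
Note (not part of the patch): per PEP 484, an annotation on **kwargs applies to
each keyword VALUE, so ``**kwargs: Dict[str, Any]`` declares that every keyword
argument is itself a dict, which is not what ``from_pretrained`` forwards to the
model constructor. A minimal sketch of the difference (hypothetical functions,
not from the OML codebase):

    from typing import Any, Dict

    def f_old(**kwargs: Dict[str, Any]) -> None:
        # mypy types each keyword value as Dict[str, Any], so a call like
        # f_old(arch="vits16") is rejected: "str" is not "Dict[str, Any]".
        ...

    def f_new(**kwargs: Any) -> None:
        # Each keyword value is Any; arbitrary constructor arguments pass through.
        ...

    f_new(arch="vits16", normalise_features=True)  # OK

The ``# type: ignore`` comments presumably silence mypy's untyped-definition
check now that ``**kwargs`` carries no annotation.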