Commit 3b828ec

upd

AlekseySh committed Jul 5, 2024
1 parent 2263bf5
Showing 2 changed files with 3 additions and 3 deletions.
oml/interfaces/models.py (2 changes: 1 addition & 1 deletion)
@@ -22,7 +22,7 @@ def feat_dim(self) -> int:
         raise NotImplementedError()
 
     @classmethod
-    def from_pretrained(cls, weights: str, **kwargs: Dict[str, Any]) -> "IExtractor":
+    def from_pretrained(cls, weights: str, **kwargs) -> "IExtractor":  # type: ignore
         """
         This method allows to download a pretrained checkpoint.
         The class field ``self.pretrained_models`` is the dictionary which keeps records of all the available
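Aside (not part of the commit): the removed annotation was a typing bug. An annotation on **kwargs types each individual keyword value, not the kwargs mapping itself, so `**kwargs: Dict[str, Any]` demanded that every keyword argument be a dict. A minimal sketch of the difference:

from typing import Any, Dict

def old_style(**kwargs: Dict[str, Any]) -> None:
    # The annotation applies to each value: every keyword argument
    # must itself be a Dict[str, Any].
    ...

def new_style(**kwargs: Any) -> None:
    # Each keyword argument may be anything; leaving **kwargs
    # unannotated, as this commit does, behaves the same way.
    ...

old_style(verbose=True)  # mypy error: "bool" is not "Dict[str, Any]"
new_style(verbose=True)  # OK

The trailing `# type: ignore` presumably keeps strict mypy settings from flagging the now-unannotated **kwargs.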
oml/models/meta/projection.py (4 changes: 2 additions & 2 deletions)
@@ -1,5 +1,5 @@
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Union
+from typing import List, Optional, Union
 
 import torch
 from torchvision.ops import MLP
@@ -88,7 +88,7 @@ def unfreeze(self) -> None:
         self.train_backbone = True
 
     @classmethod
-    def from_pretrained(cls, weights: str, **kwargs: Dict[str, Any]) -> "IExtractor":
+    def from_pretrained(cls, weights: str, **kwargs) -> "IExtractor":  # type: ignore
         # The current class takes another model as a constructor's argument, so, they need to be
         # in the `self.pretrained_models`. The problem is these models will be instantiated even if we simply
         # import something from the current module. To avoid it we added the logic of wrapping/unwrapping
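The comment above describes deferring construction so that merely importing the module stays cheap. A generic sketch of that wrap/unwrap pattern, with hypothetical names (the commit itself does not show the implementation):

from typing import Callable, Dict

import torch.nn as nn

# Wrapped: the registry stores zero-argument factories rather than built
# models, so nothing heavy is instantiated at import time. The entries
# below are illustrative, not OML's actual registry contents.
PRETRAINED_FACTORIES: Dict[str, Callable[[], nn.Module]] = {
    "tiny_projector": lambda: nn.Linear(384, 128),
}

def unwrap(weights: str) -> nn.Module:
    # Unwrapped: the factory runs only when a model is actually requested.
    return PRETRAINED_FACTORIES[weights]()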

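For reference, `from_pretrained` is the public entry point touched in both files. A usage sketch (the weights key is an assumption; check the `pretrained_models` field of the concrete extractor class for the real names):

from oml.models import ViTExtractor

# Looks the key up in the class-level `pretrained_models` registry,
# downloads the checkpoint, and returns a ready-to-use extractor.
extractor = ViTExtractor.from_pretrained("vits16_dino")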