diff --git a/eodag/config.py b/eodag/config.py
index 27e9161fc..87831224d 100644
--- a/eodag/config.py
+++ b/eodag/config.py
@@ -246,6 +246,14 @@ class Sort(TypedDict):
         sort_order_mapping: Dict[Literal["ascending", "descending"], str]
         max_sort_params: Annotated[int, Gt(0)]
 
+    class DiscoverMetadata(TypedDict):
+        """Configuration for metadata discovery"""
+
+        auto_discovery: bool
+        metadata_pattern: str
+        search_param: str
+        metadata_path: str
+
     class OrderOnResponse(TypedDict):
         """Configuration for order on-response during download"""
 
@@ -319,7 +327,7 @@ class OrderStatus(TypedDict):
     pagination: PluginConfig.Pagination
     sort: PluginConfig.Sort
     query_params_key: str
-    discover_metadata: Dict[str, Union[str, bool]]
+    discover_metadata: PluginConfig.DiscoverMetadata
     discover_product_types: Dict[str, Any]
     discover_queryables: Dict[str, Any]
     metadata_mapping: Dict[str, Union[str, List[str]]]
diff --git a/eodag/plugins/authentication/openid_connect.py b/eodag/plugins/authentication/openid_connect.py
index 842a8be63..3c816c9f7 100644
--- a/eodag/plugins/authentication/openid_connect.py
+++ b/eodag/plugins/authentication/openid_connect.py
@@ -482,7 +482,7 @@ def _constant_or_xpath_extracted(
         if not match:
             return value
         value_from_xpath = form_element.xpath(
-            self.CONFIG_XPATH_REGEX.match(value).groupdict("xpath_value")["xpath_value"]
+            match.groupdict("xpath_value")["xpath_value"]
         )
         if len(value_from_xpath) == 1:
             return value_from_xpath[0]
@@ -512,9 +512,10 @@ def __init__(self, token: str, where: str, key: Optional[str] = None) -> None:
     def __call__(self, request: PreparedRequest) -> PreparedRequest:
         """Perform the actual authentication"""
         if self.where == "qs":
-            parts = urlparse(request.url)
+            parts = urlparse(str(request.url))
             query_dict = parse_qs(parts.query)
-            query_dict.update({self.key: self.token})
+            if self.key is not None:
+                query_dict.update({self.key: [self.token]})
             url_without_args = parts._replace(query="").geturl()
             request.prepare_url(url_without_args, query_dict)
 
diff --git a/eodag/plugins/authentication/token.py b/eodag/plugins/authentication/token.py
index 165751eda..d171eab1d 100644
--- a/eodag/plugins/authentication/token.py
+++ b/eodag/plugins/authentication/token.py
@@ -197,7 +197,8 @@ def __call__(self, request: PreparedRequest) -> PreparedRequest:
         if self.where == "qs":
             parts = urlparse(str(request.url))
             qs = parse_qs(parts.query)
-            qs[self.qs_key] = self.token  # type: ignore
+            if self.qs_key is not None:
+                qs[self.qs_key] = [self.token]
             request.url = urlunparse(
                 (
                     parts.scheme,
diff --git a/eodag/plugins/download/aws.py b/eodag/plugins/download/aws.py
index c77766f06..49bd4d5a5 100644
--- a/eodag/plugins/download/aws.py
+++ b/eodag/plugins/download/aws.py
@@ -774,7 +774,7 @@ def _stream_download(
         build_safe: bool,
         progress_callback: ProgressCallback,
         assets_values: List[Dict[str, Any]],
-    ) -> Iterator[Tuple[str, datetime, int, Any, Iterator[Any]]]:
+    ) -> Iterator[Any]:
         """Yield product data chunks"""
 
         chunk_size = 4096 * 1024
diff --git a/eodag/plugins/download/base.py b/eodag/plugins/download/base.py
index 1ba2bcbc0..2724ba6c4 100644
--- a/eodag/plugins/download/base.py
+++ b/eodag/plugins/download/base.py
@@ -670,7 +670,7 @@ def download_and_retry(*args: Any, **kwargs: Unpack[DownloadConf]) -> T:
                     not_available_info = str(e)
 
                 if datetime_now >= product.next_try and datetime_now < stop_time:
-                    wait_seconds = (
+                    wait_seconds: Union[float, int] = (
                         datetime_now - product.next_try + timedelta(minutes=wait)
                     ).seconds
                     retry_count += 1
diff --git a/eodag/plugins/download/http.py b/eodag/plugins/download/http.py
index 9437c3d13..51bd67a32 100644
--- a/eodag/plugins/download/http.py
+++ b/eodag/plugins/download/http.py
@@ -33,7 +33,6 @@
     Iterator,
     List,
     Optional,
-    Tuple,
     TypedDict,
     Union,
     cast,
@@ -1011,8 +1010,11 @@ def _stream_download(
                 "content-disposition"
             ] = f"attachment; filename={filename}"
         content_type = product.headers.get("Content-Type")
-        if filename and not content_type:
-            product.headers["Content-Type"] = guess_file_type(filename)
+        guessed_content_type = (
+            guess_file_type(filename) if filename and not content_type else None
+        )
+        if guessed_content_type is not None:
+            product.headers["Content-Type"] = guessed_content_type
 
         progress_callback.reset(total=stream_size)
         for chunk in self.stream.iter_content(chunk_size=64 * 1024):
@@ -1027,7 +1029,7 @@ def _stream_download_assets(
         progress_callback: Optional[ProgressCallback] = None,
         assets_values: List[Asset] = [],
         **kwargs: Unpack[DownloadConf],
-    ) -> Iterator[Tuple[str, datetime, int, Any, Iterator[Any]]]:
+    ) -> Iterator[Any]:
         if progress_callback is None:
             logger.info("Progress bar unavailable, please call product.download()")
             progress_callback = ProgressCallback(disable=True)
@@ -1201,7 +1203,9 @@ def _download_assets(
             # start reading chunks to set asset.rel_path
             first_chunks_tuple = next(chunks_tuples)
             chunks = chain(iter([first_chunks_tuple]), chunks_tuples)
-            chunks_tuples = [(assets_values[0].rel_path, None, None, None, chunks)]
+            chunks_tuples = iter(
+                [(assets_values[0].rel_path, None, None, None, chunks)]
+            )
 
         for chunk_tuple in chunks_tuples:
             asset_path = chunk_tuple[0]
diff --git a/eodag/plugins/download/s3rest.py b/eodag/plugins/download/s3rest.py
index 846010760..f33496616 100644
--- a/eodag/plugins/download/s3rest.py
+++ b/eodag/plugins/download/s3rest.py
@@ -153,6 +153,8 @@ def download_request(
         bucket_name, prefix = get_bucket_name_and_prefix(
             url=product.location, bucket_path_level=self.config.bucket_path_level
         )
+        if prefix is None:
+            raise DownloadError(f"Could not extract prefix from {product.location}")
 
         if (
             bucket_name is None
diff --git a/eodag/plugins/manager.py b/eodag/plugins/manager.py
index 0aa7c9b88..c28645cfe 100644
--- a/eodag/plugins/manager.py
+++ b/eodag/plugins/manager.py
@@ -111,7 +111,11 @@ def __init__(self, providers_config: Dict[str, ProviderConfig]) -> None:
                     "Check that the plugin module (%s) is importable",
                     entry_point.module_name,
                 )
-            if entry_point.dist and entry_point.dist.key != "eodag":
+            if (
+                entry_point.dist
+                and entry_point.dist.key != "eodag"
+                and entry_point.dist.location is not None
+            ):
                 # use plugin providers if any
                 plugin_providers_config_path = [
                     str(x)
diff --git a/eodag/plugins/search/__init__.py b/eodag/plugins/search/__init__.py
index 3a4e88f76..56fcac80d 100644
--- a/eodag/plugins/search/__init__.py
+++ b/eodag/plugins/search/__init__.py
@@ -36,8 +36,8 @@ class PreparedSearch:
     """An object collecting needed information for search."""
 
     product_type: Optional[str] = None
-    page: int = DEFAULT_PAGE
-    items_per_page: int = DEFAULT_ITEMS_PER_PAGE
+    page: Optional[int] = DEFAULT_PAGE
+    items_per_page: Optional[int] = DEFAULT_ITEMS_PER_PAGE
     auth: Optional[Union[AuthBase, Dict[str, str]]] = None
     auth_plugin: Optional[Authentication] = None
     count: bool = True
diff --git a/eodag/plugins/search/cop_marine.py b/eodag/plugins/search/cop_marine.py
index cc531055f..6177472b4 100644
--- a/eodag/plugins/search/cop_marine.py
+++ b/eodag/plugins/search/cop_marine.py
@@ -247,7 +247,7 @@ def query(
         items_per_page = prep.items_per_page
 
         # only return 1 page if pagination is disabled
-        if page > 1 and items_per_page <= 0:
+        if page is None or items_per_page is None or page > 1 and items_per_page <= 0:
             return ([], 0) if prep.count else ([], None)
 
         product_type = kwargs.get("productType", prep.product_type)
diff --git a/eodag/plugins/search/qssearch.py b/eodag/plugins/search/qssearch.py
index bd9728bb9..878d062fa 100644
--- a/eodag/plugins/search/qssearch.py
+++ b/eodag/plugins/search/qssearch.py
@@ -23,6 +23,7 @@
 from typing import (
     TYPE_CHECKING,
     Any,
+    Callable,
     Dict,
     List,
     Optional,
@@ -203,7 +204,10 @@ class QueryStringSearch(Search):
     :type config: str
     """
 
-    extract_properties = {"xml": properties_from_xml, "json": properties_from_json}
+    extract_properties: Dict[str, Callable[..., Dict[str, Any]]] = {
+        "xml": properties_from_xml,
+        "json": properties_from_json,
+    }
 
     def __init__(self, provider: str, config: PluginConfig) -> None:
         super(QueryStringSearch, self).__init__(provider, config)
@@ -631,7 +635,7 @@ def discover_queryables(
                 )
             )
 
-        field_definitions = dict()
+        field_definitions: Dict[str, Any] = dict()
         for json_param, json_mtd in constraint_params.items():
             param = (
                 get_queryable_from_provider(
@@ -786,7 +790,7 @@ def collect_search_urls(
             prep.need_count = True
             prep.total_items_nb = None
 
-        for collection in self.get_collections(prep, **kwargs):
+        for collection in self.get_collections(prep, **kwargs) or (None,):
            # skip empty collection if one is required in api_endpoint
            if "{collection}" in self.config.api_endpoint and not collection:
                continue
@@ -1059,20 +1063,19 @@ def count_hits(self, count_url: str, result_type: Optional[str] = "json") -> int
         total_results = int(count_results)
         return total_results
 
-    def get_collections(
-        self, prep: PreparedSearch, **kwargs: Any
-    ) -> Tuple[Set[Dict[str, Any]], ...]:
+    def get_collections(self, prep: PreparedSearch, **kwargs: Any) -> Tuple[str, ...]:
         """Get the collection to which the product belongs"""
         # See https://earth.esa.int/web/sentinel/missions/sentinel-2/news/-
         # /asset_publisher/Ac0d/content/change-of
         # -format-for-new-sentinel-2-level-1c-products-starting-on-6-december
         product_type: Optional[str] = kwargs.get("productType")
+        collection: Optional[str] = None
         if product_type is None and (
             not hasattr(prep, "product_type_def_params")
             or not prep.product_type_def_params
         ):
-            collections: Set[Dict[str, Any]] = set()
-            collection: Optional[str] = getattr(self.config, "collection", None)
+            collections: Set[str] = set()
+            collection = getattr(self.config, "collection", None)
             if collection is None:
                 try:
                     for product_type, product_config in self.config.products.items():
@@ -1090,18 +1093,26 @@ def get_collections(
                 collections.add(collection)
             return tuple(collections)
 
-        collection: Optional[str] = getattr(self.config, "collection", None)
+        collection = getattr(self.config, "collection", None)
         if collection is None:
             collection = (
                 prep.product_type_def_params.get("collection", None) or product_type
             )
-        return (collection,) if not isinstance(collection, list) else tuple(collection)
+
+        if collection is None:
+            return ()
+        elif not isinstance(collection, list):
+            return (collection,)
+        else:
+            return tuple(collection)
 
     def _request(
         self,
         prep: PreparedSearch,
     ) -> Response:
         url = prep.url
+        if url is None:
+            raise ValidationError("Cannot request empty URL")
         info_message = prep.info_message
         exception_message = prep.exception_message
         try:
@@ -1347,8 +1358,11 @@ def query(
                 "specific_qssearch"
             ].get("merge_responses", None)
 
-            self.count_hits = lambda *x, **y: 1
-            self._request = super(PostJsonSearch, self)._request
+            def count_hits(self, *x, **y):
+                return 1
+
+            def _request(self, *x, **y):
+                return super(PostJsonSearch, self)._request(*x, **y)
 
             try:
                 eo_products, total_items = super(PostJsonSearch, self).query(
@@ -1449,7 +1463,7 @@ def collect_search_urls(
             auth_conf_dict = getattr(prep.auth_plugin.config, "credentials", {})
         else:
             auth_conf_dict = {}
-        for collection in self.get_collections(prep, **kwargs):
+        for collection in self.get_collections(prep, **kwargs) or (None,):
             try:
                 search_endpoint: str = self.config.api_endpoint.rstrip("/").format(
                     **dict(collection=collection, **auth_conf_dict)
@@ -1472,7 +1486,11 @@ def collect_search_urls(
                 if getattr(self.config, "merge_responses", False):
                     total_results = _total_results or 0
                 else:
-                    total_results += _total_results or 0
+                    total_results = (
+                        (_total_results or 0)
+                        if total_results is None
+                        else total_results + (_total_results or 0)
+                    )
             if "next_page_query_obj" in self.config.pagination and isinstance(
                 self.config.pagination["next_page_query_obj"], str
             ):
@@ -1497,6 +1515,8 @@ def _request(
         self,
         prep: PreparedSearch,
     ) -> Response:
         url = prep.url
+        if url is None:
+            raise ValidationError("Cannot request empty URL")
         info_message = prep.info_message
         exception_message = prep.exception_message
         timeout = getattr(self.config, "timeout", HTTP_REQ_TIMEOUT)
@@ -1515,7 +1535,10 @@
             kwargs["auth"] = prep.auth
 
         # perform the request using the next page arguments if they are defined
-        if getattr(self, "next_page_query_obj", None):
+        if (
+            hasattr(self, "next_page_query_obj")
+            and self.next_page_query_obj is not None
+        ):
             prep.query_params = self.next_page_query_obj
         if info_message:
             logger.info(info_message)
diff --git a/eodag/rest/core.py b/eodag/rest/core.py
index 06c8d5954..656fb593d 100644
--- a/eodag/rest/core.py
+++ b/eodag/rest/core.py
@@ -80,7 +80,7 @@
 )
 
 if TYPE_CHECKING:
-    from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+    from typing import Any, Dict, List, Optional, Tuple, Union
 
     from fastapi import Request
     from requests.auth import AuthBase
@@ -215,7 +215,7 @@ def search_stac_items(
     }
 
     search_results = eodag_api.search(count=True, **criteria)
-    total = search_results.number_matched
+    total = search_results.number_matched or 0
     if search_request.crunch:
         search_results = crunch_products(
             search_results, search_request.crunch, **criteria
@@ -588,9 +588,9 @@ def get_stac_extension_oseo(url: str) -> Dict[str, str]:
     :rtype: dict
     """
 
-    apply_method: Callable[[str, str], str] = lambda _, x: str(x).replace(
-        "$.product.", "$."
-    )
+    def apply_method(_: str, x: str) -> str:
+        return str(x).replace("$.product.", "$.")
+
     item_mapping = dict_items_recursive_apply(stac_config["item"], apply_method)
 
     # all properties as string type by default
diff --git a/eodag/rest/stac.py b/eodag/rest/stac.py
index 5f042a720..de9c2e75a 100644
--- a/eodag/rest/stac.py
+++ b/eodag/rest/stac.py
@@ -21,7 +21,7 @@
 import os
 from collections import defaultdict
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, cast
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast
 from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
 
 import dateutil.parser
@@ -149,16 +149,18 @@ def update_data(self, data: Dict[str, Any]) -> None:
         ):
             for i, bbox in enumerate(self.data["extent"]["spatial"]["bbox"]):
                 self.data["extent"]["spatial"]["bbox"][i] = [float(x) for x in bbox]
-        # "None" values to None
-        apply_method: Callable[[str, str], Optional[str]] = lambda _, v: (
-            None if v == "None" else v
-        )
-        self.data = dict_items_recursive_apply(self.data, apply_method)
-        # ids and titles as str
-        apply_method: Callable[[str, str], Optional[str]] = lambda k, v: (
-            str(v) if k in ["title", "id"] else v
-        )
-        self.data = dict_items_recursive_apply(self.data, apply_method)
+
+        def apply_method_none(_: str, v: str) -> Optional[str]:
+            """ "None" values to None"""
+            return None if v == "None" else v
+
+        self.data = dict_items_recursive_apply(self.data, apply_method_none)
+
+        def apply_method_ids(k, v):
+            """ids and titles as str"""
+            return str(v) if k in ["title", "id"] else v
+
+        self.data = dict_items_recursive_apply(self.data, apply_method_ids)
         # empty stac_extensions: "" to []
         if not self.data.get("stac_extensions", True):
diff --git a/eodag/rest/types/eodag_search.py b/eodag/rest/types/eodag_search.py
index d619bbf05..2557d83ba 100644
--- a/eodag/rest/types/eodag_search.py
+++ b/eodag/rest/types/eodag_search.py
@@ -49,10 +49,7 @@
 from eodag.utils import DEFAULT_ITEMS_PER_PAGE
 
 if TYPE_CHECKING:
-    try:
-        from typing import Self
-    except ImportError:
-        from _typeshed import Self
+    from typing_extensions import Self
 
 Geometry = Union[
     Dict[str, Any],
diff --git a/eodag/rest/types/queryables.py b/eodag/rest/types/queryables.py
index 2b6237474..89073807d 100644
--- a/eodag/rest/types/queryables.py
+++ b/eodag/rest/types/queryables.py
@@ -51,14 +51,16 @@ def _serialize(self, handler: SerializerFunctionWrapHandler) -> Dict[str, Any]:
         dumped: Dict[str, Any] = handler(self)
         return {EODAGSearch.to_eodag(k): v for k, v in dumped.items()}
 
-    @computed_field
+    # use [prop-decorator] mypy error code when mypy==1.12 is released
+    @computed_field  # type: ignore[misc]
     @property
     def start_datetime(self) -> Optional[str]:
         """Extract start_datetime property from datetime"""
         start = str_to_interval(self.datetime)[0]
         return start.strftime("%Y-%m-%dT%H:%M:%SZ") if start else None
 
-    @computed_field
+    # use [prop-decorator] mypy error code when mypy==1.12 is released
+    @computed_field  # type: ignore[misc]
     @property
     def end_datetime(self) -> Optional[str]:
         """Extract end_datetime property from datetime"""
diff --git a/eodag/rest/types/stac_search.py b/eodag/rest/types/stac_search.py
index fe3c13829..6f04da23a 100644
--- a/eodag/rest/types/stac_search.py
+++ b/eodag/rest/types/stac_search.py
@@ -48,10 +48,7 @@
 from eodag.utils.exceptions import ValidationError
 
 if TYPE_CHECKING:
-    try:
-        from typing import Self
-    except ImportError:
-        from _typeshed import Self
+    from typing_extensions import Self
 
 NumType = Union[float, int]
 
diff --git a/eodag/types/__init__.py b/eodag/types/__init__.py
index a8049e70a..8db0286e0 100644
--- a/eodag/types/__init__.py
+++ b/eodag/types/__init__.py
@@ -200,7 +200,7 @@ def python_field_definition_to_json(
             "%s must be an instance of Annotated" % python_field_definition
         )
 
-    json_field_definition = dict()
+    json_field_definition: Dict[str, Any] = dict()
 
     python_field_args = get_args(python_field_definition)
 
@@ -210,6 +210,10 @@ def python_field_definition_to_json(
         type_data = python_type_to_json(type(enum_args[0]))
         if isinstance(type_data, str):
             json_field_definition["type"] = type_data
+        elif type_data is None:
+            json_field_definition["type"] = json_field_definition[
+                "min"
+            ] = json_field_definition["max"] = None
         else:
             json_field_definition["type"] = [row["type"] for row in type_data]
             json_field_definition["min"] = [
@@ -224,6 +228,10 @@ def python_field_definition_to_json(
         field_type = python_type_to_json(python_field_args[0])
         if isinstance(field_type, str):
             json_field_definition["type"] = field_type
+        elif field_type is None:
+            json_field_definition["type"] = json_field_definition[
+                "min"
+            ] = json_field_definition["max"] = None
         else:
             json_field_definition["type"] = [row["type"] for row in field_type]
             json_field_definition["min"] = [
diff --git a/eodag/types/download_args.py b/eodag/types/download_args.py
index 99e4a4ef0..817a23439 100644
--- a/eodag/types/download_args.py
+++ b/eodag/types/download_args.py
@@ -17,7 +17,7 @@
 # limitations under the License.
 from __future__ import annotations
 
-from typing import Dict, TypedDict
+from typing import Dict, Optional, TypedDict
 
 
 class DownloadConf(TypedDict, total=False):
@@ -28,4 +28,4 @@ class DownloadConf(TypedDict, total=False):
     extract: bool
     dl_url_params: Dict[str, str]
     delete_archive: bool
-    asset: str
+    asset: Optional[str]
diff --git a/eodag/utils/__init__.py b/eodag/utils/__init__.py
index a536d489e..44f13187c 100644
--- a/eodag/utils/__init__.py
+++ b/eodag/utils/__init__.py
@@ -55,6 +55,7 @@
     Any,
     Callable,
     Dict,
+    Iterable,
     Iterator,
     List,
     Mapping,
@@ -1435,7 +1436,7 @@ def cast_scalar_value(value: Any, new_type: Any) -> Any:
 class StreamResponse:
     """Represents a streaming response"""
 
-    content: Iterator[bytes]
+    content: Iterable[bytes]
     headers: Optional[Mapping[str, str]] = None
     media_type: Optional[str] = None
     status_code: Optional[int] = None
diff --git a/eodag/utils/notebook.py b/eodag/utils/notebook.py
index 8adb6cdcc..1bfe61411 100644
--- a/eodag/utils/notebook.py
+++ b/eodag/utils/notebook.py
@@ -23,7 +23,7 @@
 def check_ipython() -> bool:
     """Check if called from ipython"""
     try:
-        __IPYTHON__
+        __IPYTHON__  # type: ignore[name-defined]
         return True
     except NameError:
         return False
@@ -32,7 +32,7 @@
 def check_notebook() -> bool:
     """Check if called from a notebook"""
     try:
-        shell = get_ipython().__class__.__name__
+        shell = get_ipython().__class__.__name__  # type: ignore[name-defined]
         if shell == "ZMQInteractiveShell":
             return True  # Jupyter notebook or qtconsole
         elif shell == "TerminalInteractiveShell":
@@ -69,7 +69,7 @@ def display_html(self, html_value: str) -> None:
         if not self.is_notebook:
             return None
 
-        self.html_box.data = html_value
+        setattr(self.html_box, "data", html_value)
 
         if not self.html_box_shown:
             self._html_handle = self.display(self.html_box, display_id=True)
@@ -83,5 +83,5 @@ def clear_html(self) -> None:
         if not self.is_notebook:
             return None
 
-        self.html_box.data = ""
+        setattr(self.html_box, "data", "")
         self._update_display(self.html_box, display_id=self._html_handle.display_id)
diff --git a/eodag/utils/requests.py b/eodag/utils/requests.py
index 9fda6e135..15a58432e 100644
--- a/eodag/utils/requests.py
+++ b/eodag/utils/requests.py
@@ -32,7 +32,7 @@
 def fetch_json(
     file_url: str,
     req_session: Optional[requests.Session] = None,
-    auth: Optional[requests.AuthBase] = None,
+    auth: Optional[requests.auth.AuthBase] = None,
     timeout: float = HTTP_REQ_TIMEOUT,
 ) -> Any:
     """
@@ -105,7 +105,9 @@ def _chkpath(method: str, path: str) -> Tuple[int, str]:
         else:
             return 200, "OK"
 
-    def send(self, req: requests.PreparedRequest, **kwargs: Any) -> requests.Response:
+    def send(
+        self, request: requests.PreparedRequest, *args: Any, **kwargs: Any
+    ) -> requests.Response:
         """Wraps a file, described in request, in a Response object.
 
         :param req: The PreparedRequest being "sent".
@@ -117,19 +119,20 @@ def send(self, req: requests.PreparedRequest, **kwargs: Any) -> requests.Respons
         """
 
         response = requests.Response()
-        path_url = uri_to_path(req.url)
-
-        if req.method is None or req.url is None:
+        if request.method is None or request.url is None:
             raise RequestError("Method or url of the request is missing")
-        response.status_code, response.reason = self._chkpath(req.method, path_url)
-        if response.status_code == 200 and req.method.lower() != "head":
+
+        path_url = uri_to_path(request.url)
+
+        response.status_code, response.reason = self._chkpath(request.method, path_url)
+        if response.status_code == 200 and request.method.lower() != "head":
             try:
                 response.raw = open(path_url, "rb")
             except (OSError, IOError) as err:
                 response.status_code = 500
                 response.reason = str(err)
-        response.url = req.url
-        response.request = req
+        response.url = request.url
+        response.request = request
 
         return response
diff --git a/eodag/utils/stac_reader.py b/eodag/utils/stac_reader.py
index 54fdd7183..a7b6e22a9 100644
--- a/eodag/utils/stac_reader.py
+++ b/eodag/utils/stac_reader.py
@@ -128,7 +128,7 @@ def fetch_stac_items(
     # URI opener used by PySTAC internally, instantiated here
    # to retrieve the timeout.
     _text_opener = _TextOpener(timeout, ssl_verify)
-    pystac.StacIO.read_text = _text_opener
+    pystac.StacIO.read_text = _text_opener  # type: ignore[assignment]
     stac_obj = pystac.read_file(stac_path)
 
     # Single STAC item
@@ -213,7 +213,7 @@ def fetch_stac_collections(
 
     # URI opener used by PySTAC internally, instantiated here to retrieve the timeout.
     _text_opener = _TextOpener(timeout, ssl_verify)
-    pystac.StacIO.read_text = _text_opener
+    pystac.StacIO.read_text = _text_opener  # type: ignore[assignment]
     stac_obj = pystac.read_file(stac_path)
 
     if isinstance(stac_obj, pystac.Catalog):
diff --git a/pyproject.toml b/pyproject.toml
index 5ea9e71cb..26141d4c5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -5,6 +5,12 @@ build-backend = "setuptools.build_meta"
 [tool.setuptools_scm]
 fallback_version = "3.0.0b3.dev0"
 
+[tool.coverage.report]
+exclude_lines = [
+    "pragma: no cover",
+    "if TYPE_CHECKING:"
+]
+
 [[tool.mypy.overrides]]
 module = [
     "click",
diff --git a/setup.cfg b/setup.cfg
index c7084c2c4..3cb5e503a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -31,6 +31,7 @@ classifiers =
     Topic :: Internet :: WWW/HTTP :: Indexing/Search
     Topic :: Scientific/Engineering :: GIS
     Topic :: Software Development :: Libraries :: Python Modules
+    Typing :: Typed
 
 [options]
 packages = find:
@@ -117,6 +118,12 @@ dev =
     stdlib-list
    boto3-stubs[essential]
    types-lxml
+    types-cachetools
+    types-requests
+    types-python-dateutil
+    types-setuptools
+    types-tqdm
+    mypy
 docs =
     eodag[all]
     sphinx
diff --git a/tests/units/test_search_plugins.py b/tests/units/test_search_plugins.py
index 0dd28fefd..bad66140b 100644
--- a/tests/units/test_search_plugins.py
+++ b/tests/units/test_search_plugins.py
@@ -764,9 +764,7 @@ def test_plugins_search_postjsonsearch_count_and_search_awseos(self, mock__reque
         self.assertEqual(len(products), number_of_products)
         self.assertIsInstance(products[0], EOProduct)
 
-    @mock.patch(
-        "eodag.plugins.search.qssearch.QueryStringSearch._request", autospec=True
-    )
+    @mock.patch("eodag.plugins.search.qssearch.PostJsonSearch._request", autospec=True)
     def test_plugins_search_postjsonsearch_count_and_search_awseos_s2l2a(
         self, mock__request
     ):
diff --git a/tox.ini b/tox.ini
index e5f2973eb..af8ad6dc7 100644
--- a/tox.ini
+++ b/tox.ini
@@ -84,6 +84,6 @@ commands =
 
 [testenv:linters]
 basepython = python3
-skip_install = true
-deps = pre-commit
-commands = pre-commit run --all-files
+commands =
+    pre-commit run --all-files
+    python -m mypy -p eodag