diff --git a/.github/workflows/ci-additional.yaml b/.github/workflows/ci-additional.yaml
index e2685f445d7..ff3e8ab7e63 100644
--- a/.github/workflows/ci-additional.yaml
+++ b/.github/workflows/ci-additional.yaml
@@ -105,11 +105,9 @@ jobs:
         run: |
           python -m pip install mypy
 
-      # Temporarily overriding to be true due to https://github.com/pydata/xarray/issues/6551
-      # python -m mypy --install-types --non-interactive
       - name: Run mypy
         run: |
-          python -m mypy --install-types --non-interactive || true
+          python -m mypy --install-types --non-interactive
 
   min-version-policy:
     name: Minimum Version Policy
diff --git a/xarray/backends/api.py b/xarray/backends/api.py
index 9967b0a08c0..4962a4a9c02 100644
--- a/xarray/backends/api.py
+++ b/xarray/backends/api.py
@@ -35,7 +35,7 @@
 try:
     from dask.delayed import Delayed
 except ImportError:
-    Delayed = None
+    Delayed = None  # type: ignore
 
 
 DATAARRAY_NAME = "__xarray_dataarray_name__"
diff --git a/xarray/backends/locks.py b/xarray/backends/locks.py
index 59417336f5f..1cc93779843 100644
--- a/xarray/backends/locks.py
+++ b/xarray/backends/locks.py
@@ -7,12 +7,12 @@
     from dask.utils import SerializableLock
 except ImportError:
     # no need to worry about serializing the lock
-    SerializableLock = threading.Lock
+    SerializableLock = threading.Lock  # type: ignore
 
 try:
     from dask.distributed import Lock as DistributedLock
 except ImportError:
-    DistributedLock = None
+    DistributedLock = None  # type: ignore
 
 
 # Locks used by multiple backends.
diff --git a/xarray/core/_typed_ops.pyi b/xarray/core/_typed_ops.pyi
index e23b5848ff7..e5b3c9112c7 100644
--- a/xarray/core/_typed_ops.pyi
+++ b/xarray/core/_typed_ops.pyi
@@ -21,7 +21,7 @@ from .variable import Variable
 try:
     from dask.array import Array as DaskArray
 except ImportError:
-    DaskArray = np.ndarray
+    DaskArray = np.ndarray  # type: ignore
 
 # DatasetOpsMixin etc. are parent classes of Dataset etc.
 # Because of https://github.com/pydata/xarray/issues/5755, we redefine these. Generally
diff --git a/xarray/core/computation.py b/xarray/core/computation.py
index 1a32cda512c..6f1e08bf84f 100644
--- a/xarray/core/computation.py
+++ b/xarray/core/computation.py
@@ -17,7 +17,6 @@
     Iterable,
     Mapping,
     Sequence,
-    overload,
 )
 
 import numpy as np
@@ -1846,24 +1845,26 @@ def where(cond, x, y, keep_attrs=None):
     )
 
 
-@overload
-def polyval(coord: DataArray, coeffs: DataArray, degree_dim: Hashable) -> DataArray:
-    ...
+# These overloads seem not to work — mypy says it can't find a matching overload for
+# `DataArray` & `DataArray`, despite that being in the first overload. Would be nice to
+# have overloaded functions rather than just `T_Xarray` for everything.
+# @overload
+# def polyval(coord: DataArray, coeffs: DataArray, degree_dim: Hashable) -> DataArray:
+#     ...
 
 
-@overload
-def polyval(coord: T_Xarray, coeffs: Dataset, degree_dim: Hashable) -> Dataset:
-    ...
+# @overload
+# def polyval(coord: T_Xarray, coeffs: Dataset, degree_dim: Hashable) -> Dataset:
+#     ...
 
 
-@overload
-def polyval(coord: Dataset, coeffs: T_Xarray, degree_dim: Hashable) -> Dataset:
-    ...
+
+# @overload
+# def polyval(coord: Dataset, coeffs: T_Xarray, degree_dim: Hashable) -> Dataset:
+#     ...
 
 
-def polyval(
-    coord: T_Xarray, coeffs: T_Xarray, degree_dim: Hashable = "degree"
-) -> T_Xarray:
+def polyval(coord: T_Xarray, coeffs: T_Xarray, degree_dim="degree") -> T_Xarray:
     """Evaluate a polynomial at specific values
 
     Parameters
diff --git a/xarray/core/dask_array_compat.py b/xarray/core/dask_array_compat.py
index 4d73867a283..e114c238b72 100644
--- a/xarray/core/dask_array_compat.py
+++ b/xarray/core/dask_array_compat.py
@@ -5,7 +5,7 @@
 try:
     import dask.array as da
 except ImportError:
-    da = None
+    da = None  # type: ignore
 
 
 def _validate_pad_output_shape(input_shape, pad_width, output_shape):
diff --git a/xarray/core/dataarray.py b/xarray/core/dataarray.py
index d15cbd00c0d..fc3cbef16f8 100644
--- a/xarray/core/dataarray.py
+++ b/xarray/core/dataarray.py
@@ -17,6 +17,7 @@
 import numpy as np
 import pandas as pd
 
+from ..backends.common import AbstractDataStore, ArrayWriter
 from ..coding.calendar_ops import convert_calendar, interp_calendar
 from ..coding.cftimeindex import CFTimeIndex
 from ..plot.plot import _PlotMethods
@@ -67,7 +68,7 @@
 try:
     from dask.delayed import Delayed
 except ImportError:
-    Delayed = None
+    Delayed = None  # type: ignore
 try:
     from cdms2 import Variable as cdms2_Variable
 except ImportError:
@@ -2875,7 +2876,9 @@ def to_masked_array(self, copy: bool = True) -> np.ma.MaskedArray:
         isnull = pd.isnull(values)
         return np.ma.MaskedArray(data=values, mask=isnull, copy=copy)
 
-    def to_netcdf(self, *args, **kwargs) -> bytes | Delayed | None:
+    def to_netcdf(
+        self, *args, **kwargs
+    ) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
         """Write DataArray contents to a netCDF file.
 
         All parameters are passed directly to :py:meth:`xarray.Dataset.to_netcdf`.
diff --git a/xarray/core/dataset.py b/xarray/core/dataset.py
index 76776b4bc44..1166e240120 100644
--- a/xarray/core/dataset.py
+++ b/xarray/core/dataset.py
@@ -32,6 +32,7 @@
 
 import xarray as xr
 
+from ..backends.common import ArrayWriter
 from ..coding.calendar_ops import convert_calendar, interp_calendar
 from ..coding.cftimeindex import CFTimeIndex, _parse_array_of_cftime_strings
 from ..plot.dataset_plot import _Dataset_PlotMethods
@@ -110,7 +111,7 @@
 try:
     from dask.delayed import Delayed
 except ImportError:
-    Delayed = None
+    Delayed = None  # type: ignore
 
 
 # list of attributes of pd.DatetimeIndex that are ndarrays of time info
@@ -1686,7 +1687,7 @@ def to_netcdf(
         unlimited_dims: Iterable[Hashable] = None,
         compute: bool = True,
         invalid_netcdf: bool = False,
-    ) -> bytes | Delayed | None:
+    ) -> tuple[ArrayWriter, AbstractDataStore] | bytes | Delayed | None:
         """Write dataset contents to a netCDF file.
 
         Parameters
diff --git a/xarray/core/duck_array_ops.py b/xarray/core/duck_array_ops.py
index b85d0e1645e..253a68b7205 100644
--- a/xarray/core/duck_array_ops.py
+++ b/xarray/core/duck_array_ops.py
@@ -30,7 +30,7 @@
     import dask.array as dask_array
     from dask.base import tokenize
 except ImportError:
-    dask_array = None
+    dask_array = None  # type: ignore
 
 
 def _dask_or_eager_func(
diff --git a/xarray/core/nanops.py b/xarray/core/nanops.py
index c1a4d629f97..fa96bd6e150 100644
--- a/xarray/core/nanops.py
+++ b/xarray/core/nanops.py
@@ -11,7 +11,7 @@
 
     from . import dask_array_compat
 except ImportError:
-    dask_array = None
+    dask_array = None  # type: ignore[assignment]
     dask_array_compat = None  # type: ignore[assignment]
 
 
diff --git a/xarray/core/types.py b/xarray/core/types.py
index 3f368501b25..74cb2fc2d46 100644
--- a/xarray/core/types.py
+++ b/xarray/core/types.py
@@ -16,7 +16,7 @@
 try:
     from dask.array import Array as DaskArray
 except ImportError:
-    DaskArray = np.ndarray
+    DaskArray = np.ndarray  # type: ignore
 
 
 T_Dataset = TypeVar("T_Dataset", bound="Dataset")
diff --git a/xarray/tests/test_computation.py b/xarray/tests/test_computation.py
index 127fdc5404f..c59f1a6584f 100644
--- a/xarray/tests/test_computation.py
+++ b/xarray/tests/test_computation.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import functools
 import operator
 import pickle
@@ -22,6 +24,7 @@
     unified_dim_sizes,
 )
 from xarray.core.pycompat import dask_version
+from xarray.core.types import T_Xarray
 
 from . import has_dask, raise_if_dask_computes, requires_dask
 
@@ -2009,14 +2012,14 @@ def test_where_attrs() -> None:
         ),
     ],
 )
-def test_polyval(use_dask, x, coeffs, expected) -> None:
+def test_polyval(use_dask, x: T_Xarray, coeffs: T_Xarray, expected) -> None:
     if use_dask:
         if not has_dask:
             pytest.skip("requires dask")
         coeffs = coeffs.chunk({"degree": 2})
         x = x.chunk({"x": 2})
     with raise_if_dask_computes():
-        actual = xr.polyval(x, coeffs)
+        actual = xr.polyval(coord=x, coeffs=coeffs)
     xr.testing.assert_allclose(actual, expected)
 
 
diff --git a/xarray/tests/test_testing.py b/xarray/tests/test_testing.py
index 2bde7529d1e..1470706d0eb 100644
--- a/xarray/tests/test_testing.py
+++ b/xarray/tests/test_testing.py
@@ -10,7 +10,7 @@
 try:
     from dask.array import from_array as dask_from_array
 except ImportError:
-    dask_from_array = lambda x: x
+    dask_from_array = lambda x: x  # type: ignore
 
 try:
     import pint
diff --git a/xarray/util/generate_ops.py b/xarray/util/generate_ops.py
index f1fd6cbfeb2..0a382642708 100644
--- a/xarray/util/generate_ops.py
+++ b/xarray/util/generate_ops.py
@@ -210,7 +210,7 @@ def inplace():
 try:
     from dask.array import Array as DaskArray
 except ImportError:
-    DaskArray = np.ndarray
+    DaskArray = np.ndarray  # type: ignore
 
 # DatasetOpsMixin etc. are parent classes of Dataset etc.
 # Because of https://github.com/pydata/xarray/issues/5755, we redefine these. Generally
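
Note: the recurring `# type: ignore` additions above all sit on xarray's optional-dependency fallback pattern, where a stand-in is assigned when dask is not installed and mypy would otherwise report the assignment as an incompatible redefinition of the imported name. A minimal, self-contained sketch of that pattern (a standalone illustration, not part of the patch):

import threading

try:
    # Preferred: dask's lock, which can be serialized between processes.
    from dask.utils import SerializableLock
except ImportError:
    # Fallback when dask is absent; mypy treats this as a redefinition of the
    # imported name, so the assignment carries a `# type: ignore`.
    SerializableLock = threading.Lock  # type: ignore

lock = SerializableLock()  # usable whether or not dask is installed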