Enable mypy warn unused ignores #7335

Merged: 4 commits, Dec 1, 2022
1 change: 1 addition & 0 deletions pyproject.toml
@@ -27,6 +27,7 @@ exclude_lines = ["pragma: no cover", "if TYPE_CHECKING"]
exclude = 'xarray/util/generate_.*\.py'
files = "xarray"
show_error_codes = true
+warn_unused_ignores = true

# Most of the numerical computing stack doesn't have type annotations yet.
[[tool.mypy.overrides]]
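
With `warn_unused_ignores = true`, mypy flags every `# type: ignore` comment that no longer suppresses a real error, which is what makes the ignore deletions in the rest of this diff safe. A minimal sketch of the behavior, with a hypothetical file and function (not from xarray):

# demo.py
def double(x: int) -> int:
    return x * 2

result = double(21)  # type: ignore  # nothing on this line actually errors
# With the setting above (or `mypy --warn-unused-ignores demo.py`), mypy reports:
#   demo.py:5: error: Unused "type: ignore" comment
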
2 changes: 1 addition & 1 deletion xarray/__init__.py
@@ -40,7 +40,7 @@
from importlib.metadata import version as _version
except ImportError:
# if the fallback library is missing, we are doomed.
-from importlib_metadata import version as _version  # type: ignore[no-redef]
+from importlib_metadata import version as _version

try:
__version__ = _version("xarray")
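
The ignore removed above was scoped to `no-redef`, the error mypy raises when a fallback import rebinds a name that is already defined. A generic sketch of the pattern (the module pairing is purely illustrative):

try:
    import simplejson as json  # preferred implementation
except ImportError:
    import json  # type: ignore[no-redef]  # rebinds `json`, hence the scoped ignore
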
2 changes: 1 addition & 1 deletion xarray/backends/locks.py
@@ -61,7 +61,7 @@ def _get_lock_maker(scheduler=None):
try:
from dask.distributed import Lock as DistributedLock
except ImportError:
-DistributedLock = None  # type: ignore
+DistributedLock = None
return DistributedLock
else:
raise KeyError(scheduler)
104 changes: 52 additions & 52 deletions xarray/core/_typed_ops.pyi

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion xarray/core/alignment.py
@@ -490,7 +490,7 @@ def _get_dim_pos_indexers(
obj_idx = matching_indexes.get(key)
if obj_idx is not None:
if self.reindex[key]:
-indexers = obj_idx.reindex_like(aligned_idx, **self.reindex_kwargs)  # type: ignore[call-arg]
+indexers = obj_idx.reindex_like(aligned_idx, **self.reindex_kwargs)
dim_pos_indexers.update(indexers)

return dim_pos_indexers
2 changes: 1 addition & 1 deletion xarray/core/combine.py
@@ -377,7 +377,7 @@ def _nested_combine(

# Define type for arbitrarily-nested list of lists recursively
# Currently mypy cannot handle this but other linters can (https://stackoverflow.com/a/53845083/3154101)
-DATASET_HYPERCUBE = Union[Dataset, Iterable["DATASET_HYPERCUBE"]]  # type: ignore
+DATASET_HYPERCUBE = Union[Dataset, Iterable["DATASET_HYPERCUBE"]]  # type: ignore[misc]


def combine_nested(
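
Narrowing the bare ignore to `# type: ignore[misc]` keeps suppressing the one error mypy currently raises for a recursive alias while still letting every other check, including `warn_unused_ignores`, see the line. A sketch of the difference, with hypothetical alias names:

from typing import Iterable, Union

# A bare ignore silences every possible error on its line:
NESTED_A = Union[int, Iterable["NESTED_A"]]  # type: ignore

# A scoped ignore silences only the named error code, so any new,
# unrelated error appearing on this line would still be reported:
NESTED_B = Union[int, Iterable["NESTED_B"]]  # type: ignore[misc]
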
2 changes: 1 addition & 1 deletion xarray/core/computation.py
@@ -1734,7 +1734,7 @@ def dot(
dim_counts.update(arr.dims)
dims = tuple(d for d, c in dim_counts.items() if c > 1)

-dot_dims: set[Hashable] = set(dims)  # type:ignore[arg-type]
+dot_dims: set[Hashable] = set(dims)

# dimensions to be parallelized
broadcast_dims = common_dims - dot_dims
10 changes: 4 additions & 6 deletions xarray/core/coordinates.py
@@ -54,11 +54,11 @@ def dtypes(self) -> Frozen[Hashable, np.dtype]:

@property
def indexes(self) -> Indexes[pd.Index]:
-return self._data.indexes  # type: ignore[attr-defined]
+return self._data.indexes

@property
def xindexes(self) -> Indexes[Index]:
-return self._data.xindexes  # type: ignore[attr-defined]
+return self._data.xindexes

@property
def variables(self):
@@ -116,11 +116,9 @@ def to_index(self, ordered_dims: Sequence[Hashable] | None = None) -> pd.Index:
raise ValueError("no valid index for a 0-dimensional object")
elif len(ordered_dims) == 1:
(dim,) = ordered_dims
-return self._data.get_index(dim)  # type: ignore[attr-defined]
+return self._data.get_index(dim)
else:
-indexes = [
-    self._data.get_index(k) for k in ordered_dims  # type: ignore[attr-defined]
-]
+indexes = [self._data.get_index(k) for k in ordered_dims]

# compute the sizes of the repeat and tile for the cartesian product
# (taken from pandas.core.reshape.util)
2 changes: 1 addition & 1 deletion xarray/core/dataarray.py
@@ -428,7 +428,7 @@ def __init__(

# TODO(shoyer): document this argument, once it becomes part of the
# public interface.
-self._indexes = indexes  # type: ignore[assignment]
+self._indexes = indexes

self._close = None

4 changes: 2 additions & 2 deletions xarray/core/groupby.py
@@ -713,7 +713,7 @@ def _flox_reduce(
elif dim is ...:
parsed_dim = tuple(self._original_obj.dims)
else:
-parsed_dim = tuple(dim)  # type:ignore[arg-type]
+parsed_dim = tuple(dim)

# Do this so we raise the same error message whether flox is present or not.
# Better to control it here than in flox.
@@ -747,7 +747,7 @@ def _flox_reduce(
result = xarray_reduce(
self._original_obj.drop_vars(non_numeric),
group,
-dim=parsed_dim,  # type:ignore[arg-type] # https://github.com/xarray-contrib/flox/issues/96
+dim=parsed_dim,
expected_groups=expected_groups,
isbin=isbin,
keep_attrs=keep_attrs,
4 changes: 2 additions & 2 deletions xarray/core/indexing.py
@@ -180,7 +180,7 @@ def map_index_queries(
# forward dimension indexers with no index/coordinate
results.append(IndexSelResult(labels))
else:
-results.append(index.sel(labels, **options))  # type: ignore[call-arg]
+results.append(index.sel(labels, **options))

merged = merge_sel_results(results)

@@ -1422,7 +1422,7 @@ def __init__(self, array: pd.Index, dtype: DTypeLike = None):
if dtype is None:
self._dtype = get_valid_numpy_dtype(array)
else:
-self._dtype = np.dtype(dtype)  # type: ignore[assignment]
+self._dtype = np.dtype(dtype)

@property
def dtype(self) -> np.dtype:
2 changes: 1 addition & 1 deletion xarray/core/variable.py
@@ -1509,7 +1509,7 @@ def pad(
if reflect_type is not None:
pad_option_kwargs["reflect_type"] = reflect_type

-array = np.pad(  # type: ignore[call-overload]
+array = np.pad(
self.data.astype(dtype, copy=False),
pad_width_by_index,
mode=mode,
4 changes: 2 additions & 2 deletions xarray/plot/dataarray_plot.py
@@ -1488,7 +1488,7 @@ def newplotfunc(
if ax is None:
# TODO: Importing Axes3D is no longer necessary in matplotlib >= 3.2.
# Remove when minimum requirement of matplotlib is 3.2:
-from mpl_toolkits.mplot3d import Axes3D  # type: ignore  # noqa: F401
+from mpl_toolkits.mplot3d import Axes3D  # noqa: F401

# delete so it does not end up in locals()
del Axes3D
@@ -1521,7 +1521,7 @@ def newplotfunc(
and not kwargs.get("_is_facetgrid", False)
and ax is not None
):
-import mpl_toolkits  # type: ignore
+import mpl_toolkits

if not isinstance(ax, mpl_toolkits.mplot3d.Axes3D):
raise ValueError(
2 changes: 1 addition & 1 deletion xarray/tests/test_backends.py
@@ -3153,7 +3153,7 @@ def test_open_badbytes(self) -> None:
with pytest.raises(
ValueError, match=r"not the signature of a valid netCDF4 file"
):
-with open_dataset(BytesIO(b"garbage"), engine="h5netcdf"):  # type: ignore[arg-type]
+with open_dataset(BytesIO(b"garbage"), engine="h5netcdf"):
pass

def test_open_twice(self) -> None:
4 changes: 2 additions & 2 deletions xarray/tests/test_coding_times.py
@@ -614,7 +614,7 @@ def test_cf_timedelta_2d() -> None:

actual = coding.times.decode_cf_timedelta(numbers, units)
assert_array_equal(expected, actual)
-assert expected.dtype == actual.dtype  # type: ignore
+assert expected.dtype == actual.dtype


@pytest.mark.parametrize(
@@ -651,7 +651,7 @@ def test_format_cftime_datetime(date_args, expected) -> None:
def test_decode_cf(calendar) -> None:
days = [1.0, 2.0, 3.0]
# TODO: GH5690 — do we want to allow this type for `coords`?
-da = DataArray(days, coords=[days], dims=["time"], name="test")  # type: ignore
+da = DataArray(days, coords=[days], dims=["time"], name="test")
ds = da.to_dataset()

for v in ["test", "time"]:
6 changes: 3 additions & 3 deletions xarray/tests/test_dataarray.py
@@ -512,7 +512,7 @@ def test_equals_and_identical(self) -> None:

def test_equals_failures(self) -> None:
orig = DataArray(np.arange(5.0), {"a": 42}, dims="x")
-assert not orig.equals(np.arange(5))  # type: ignore
+assert not orig.equals(np.arange(5))  # type: ignore[arg-type]
assert not orig.identical(123) # type: ignore
assert not orig.broadcast_equals({1: 2}) # type: ignore

@@ -2754,9 +2754,9 @@ def test_quantile_method(self, method) -> None:
actual = DataArray(self.va).quantile(q, method=method)

if Version(np.__version__) >= Version("1.22.0"):
-expected = np.nanquantile(self.dv.values, np.array(q), method=method)  # type: ignore[call-arg]
+expected = np.nanquantile(self.dv.values, np.array(q), method=method)
else:
-expected = np.nanquantile(self.dv.values, np.array(q), interpolation=method)  # type: ignore[call-arg]
+expected = np.nanquantile(self.dv.values, np.array(q), interpolation=method)

np.testing.assert_allclose(actual.values, expected)

2 changes: 1 addition & 1 deletion xarray/tests/test_dataset.py
@@ -6230,7 +6230,7 @@ def test_query(self, backend, engine, parser) -> None:
with pytest.raises(ValueError):
ds.query("a > 5") # type: ignore # must be dict or kwargs
with pytest.raises(ValueError):
-ds.query(x=(a > 5))  # type: ignore # must be query string
+ds.query(x=(a > 5))
with pytest.raises(IndexError):
ds.query(y="a > 5") # wrong length dimension
with pytest.raises(IndexError):
2 changes: 1 addition & 1 deletion xarray/tests/test_distributed.py
@@ -189,7 +189,7 @@ def test_dask_distributed_zarr_integration_test(
write_kwargs: dict[str, Any] = {"consolidated": True}
read_kwargs: dict[str, Any] = {"backend_kwargs": {"consolidated": True}}
else:
-write_kwargs = read_kwargs = {}  # type: ignore
+write_kwargs = read_kwargs = {}
chunks = {"dim1": 4, "dim2": 3, "dim3": 5}
with cluster() as (s, [a, b]):
with Client(s["address"], loop=loop):
3 changes: 1 addition & 2 deletions xarray/tests/test_extensions.py
@@ -37,15 +37,14 @@ def foo(self):

da = xr.DataArray(0)
assert da.demo.foo == "bar"

# accessor is cached
assert ds.demo is ds.demo

# check descriptor
assert ds.demo.__doc__ == "Demo accessor."
# TODO: typing doesn't seem to work with accessors
assert xr.Dataset.demo.__doc__ == "Demo accessor." # type: ignore
-assert isinstance(ds.demo, DemoAccessor)  # type: ignore
+assert isinstance(ds.demo, DemoAccessor)
assert xr.Dataset.demo is DemoAccessor # type: ignore

# ensure we can remove it
2 changes: 1 addition & 1 deletion xarray/tests/test_plot.py
@@ -43,7 +43,7 @@
try:
import matplotlib as mpl
import matplotlib.pyplot as plt
-import mpl_toolkits  # type: ignore
+import mpl_toolkits
except ImportError:
pass

4 changes: 2 additions & 2 deletions xarray/tests/test_variable.py
@@ -1769,9 +1769,9 @@ def test_quantile_method(self, method, use_dask) -> None:
actual = v.quantile(q, dim="y", method=method)

if Version(np.__version__) >= Version("1.22"):
-expected = np.nanquantile(self.d, q, axis=1, method=method)  # type: ignore[call-arg]
+expected = np.nanquantile(self.d, q, axis=1, method=method)
else:
-expected = np.nanquantile(self.d, q, axis=1, interpolation=method)  # type: ignore[call-arg]
+expected = np.nanquantile(self.d, q, axis=1, interpolation=method)

if use_dask:
assert isinstance(actual.data, dask_array_type)
4 changes: 2 additions & 2 deletions xarray/util/generate_ops.py
@@ -110,7 +110,7 @@ def {method}(self: T_Dataset, other: DsCompatible) -> T_Dataset: ...{override}"""
@overload{override}
def {method}(self, other: T_Dataset) -> T_Dataset: ...
@overload
def {method}(self, other: "DatasetGroupBy") -> "Dataset": ... # type: ignore[misc]
def {method}(self, other: "DatasetGroupBy") -> "Dataset": ...
@overload
def {method}(self: T_DataArray, other: DaCompatible) -> T_DataArray: ..."""
stub_var = """\
@@ -124,7 +124,7 @@ def {method}(self: T_Variable, other: VarCompatible) -> T_Variable: ..."""
@overload{override}
def {method}(self, other: T_Dataset) -> T_Dataset: ...
@overload
def {method}(self, other: "DataArray") -> "Dataset": ... # type: ignore[misc]
def {method}(self, other: "DataArray") -> "Dataset": ...
@overload
def {method}(self, other: GroupByIncompatible) -> NoReturn: ..."""
stub_dagb = """\
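
The `[misc]` ignores dropped from these stub templates had been silencing mypy's overlapping-overload check and were no longer needed. A generic sketch of the error that kind of comment suppresses, using a hypothetical function:

from typing import overload

@overload
def describe(x: int) -> int: ...  # type: ignore[misc]  # int is also an object, but the return types differ
@overload
def describe(x: object) -> str: ...
def describe(x: object) -> object:
    # Without the ignore above, mypy reports: "Overloaded function signatures
    # 1 and 2 overlap with incompatible return types"  [misc]
    return x if isinstance(x, int) else str(x)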