Skip to content
forked from pydata/xarray

Commit

Permalink
Squashed commit of the following:
Browse files Browse the repository at this point in the history
commit 08f7f74
Merge: 53c0f4e 278d2e6
Author: dcherian <[email protected]>
Date:   Tue Oct 29 09:36:58 2019 -0600

    Merge remote-tracking branch 'upstream/master' into fix/dask-computes

    * upstream/master:
      upgrade black version to 19.10b0 (pydata#3456)
      Remove outdated code related to compatibility with netcdftime (pydata#3450)

commit 53c0f4e
Author: dcherian <[email protected]>
Date:   Tue Oct 29 09:25:27 2019 -0600

    Add identity check to lazy_array_equiv

commit 5e742e4
Author: dcherian <[email protected]>
Date:   Tue Oct 29 09:22:15 2019 -0600

    update whats new

commit ee0d422
Merge: e99148e 74ca69a
Author: dcherian <[email protected]>
Date:   Tue Oct 29 09:18:38 2019 -0600

    Merge remote-tracking branch 'upstream/master' into fix/dask-computes

    * upstream/master:
      Remove deprecated behavior from dataset.drop docstring (pydata#3451)
      jupyterlab dark theme (pydata#3443)
      Drop groups associated with nans in group variable (pydata#3406)
      Allow ellipsis (...) in transpose (pydata#3421)
      Another groupby.reduce bugfix. (pydata#3403)
      add icomoon license (pydata#3448)

commit e99148e
Author: dcherian <[email protected]>
Date:   Tue Oct 29 09:17:58 2019 -0600

    add concat test

commit 4a66e7c
Author: dcherian <[email protected]>
Date:   Mon Oct 28 10:19:32 2019 -0600

    review suggestions.

commit 8739ddd
Author: dcherian <[email protected]>
Date:   Mon Oct 28 08:32:15 2019 -0600

    better docstring

commit e84cc97
Author: dcherian <[email protected]>
Date:   Sun Oct 27 20:22:13 2019 -0600

    Optimize dask array equality checks.

    Dask arrays with the same graph have the same name. We can use this to quickly
    compare dask-backed variables without computing.

    Fixes pydata#3068 and pydata#3311
  • Loading branch information
dcherian committed Oct 30, 2019
1 parent 7612ecc commit 705e656
Show file tree
Hide file tree
Showing 6 changed files with 171 additions and 25 deletions.
3 changes: 3 additions & 0 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,9 @@ Bug fixes
but cloudpickle isn't (:issue:`3401`) by `Rhys Doyle <https://github.com/rdoyle45>`_
- Fix grouping over variables with NaNs. (:issue:`2383`, :pull:`3406`).
By `Deepak Cherian <https://github.com/dcherian>`_.
- Use dask names to compare dask objects prior to comparing values after computation.
(:issue:`3068`, :issue:`3311`, :issue:`3454`, :pull:`3453`).
By `Deepak Cherian <https://github.com/dcherian>`_.
- Sync with cftime by removing `dayofwk=-1` for cftime>=1.0.4.
By `Anderson Banihirwe <https://github.com/andersy005>`_.
- Fix :py:meth:`xarray.core.groupby.DataArrayGroupBy.reduce` and
Expand Down
16 changes: 16 additions & 0 deletions xarray/core/concat.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from . import dtypes, utils
from .alignment import align
from .duck_array_ops import lazy_array_equiv
from .merge import _VALID_COMPAT, unique_variable
from .variable import IndexVariable, Variable, as_variable
from .variable import concat as concat_vars
Expand Down Expand Up @@ -189,6 +190,21 @@ def process_subset_opt(opt, subset):
# all nonindexes that are not the same in each dataset
for k in getattr(datasets[0], subset):
if k not in concat_over:
equals[k] = None
variables = [ds.variables[k] for ds in datasets]
# first check without comparing values i.e. no computes
for var in variables[1:]:
equals[k] = getattr(variables[0], compat)(
var, equiv=lazy_array_equiv
)
if not equals[k]:
break

if equals[k] is not None:
if equals[k] is False:
concat_over.add(k)
continue

# Compare the variable of all datasets vs. the one
# of the first dataset. Perform the minimum amount of
# loads in order to avoid multiple loads from disk
Expand Down
58 changes: 43 additions & 15 deletions xarray/core/duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,27 +174,53 @@ def as_shared_dtype(scalars_or_arrays):
return [x.astype(out_type, copy=False) for x in arrays]


def allclose_or_equiv(arr1, arr2, rtol=1e-5, atol=1e-8):
"""Like np.allclose, but also allows values to be NaN in both arrays
def lazy_array_equiv(arr1, arr2):
"""Like array_equal, but doesn't actually compare values.
Returns True or False when equality can be determined without computing.
    Returns None when equality cannot be determined (e.g. one or both of arr1, arr2 are numpy arrays)
"""
if arr1 is arr2:
return True
arr1 = asarray(arr1)
arr2 = asarray(arr2)
if arr1.shape != arr2.shape:
return False
return bool(isclose(arr1, arr2, rtol=rtol, atol=atol, equal_nan=True).all())
if (
dask_array
and isinstance(arr1, dask_array.Array)
and isinstance(arr2, dask_array.Array)
):
# GH3068
if arr1.name == arr2.name:
return True
return None


def allclose_or_equiv(arr1, arr2, rtol=1e-5, atol=1e-8):
"""Like np.allclose, but also allows values to be NaN in both arrays
"""
arr1 = asarray(arr1)
arr2 = asarray(arr2)
lazy_equiv = lazy_array_equiv(arr1, arr2)
if lazy_equiv is None:
return bool(isclose(arr1, arr2, rtol=rtol, atol=atol, equal_nan=True).all())
else:
return lazy_equiv


def array_equiv(arr1, arr2):
"""Like np.array_equal, but also allows values to be NaN in both arrays
"""
arr1 = asarray(arr1)
arr2 = asarray(arr2)
if arr1.shape != arr2.shape:
return False
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "In the future, 'NAT == x'")
flag_array = (arr1 == arr2) | (isnull(arr1) & isnull(arr2))
return bool(flag_array.all())
lazy_equiv = lazy_array_equiv(arr1, arr2)
if lazy_equiv is None:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "In the future, 'NAT == x'")
flag_array = (arr1 == arr2) | (isnull(arr1) & isnull(arr2))
return bool(flag_array.all())
else:
return lazy_equiv


def array_notnull_equiv(arr1, arr2):
Expand All @@ -203,12 +229,14 @@ def array_notnull_equiv(arr1, arr2):
"""
arr1 = asarray(arr1)
arr2 = asarray(arr2)
if arr1.shape != arr2.shape:
return False
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "In the future, 'NAT == x'")
flag_array = (arr1 == arr2) | isnull(arr1) | isnull(arr2)
return bool(flag_array.all())
lazy_equiv = lazy_array_equiv(arr1, arr2)
if lazy_equiv is None:
with warnings.catch_warnings():
warnings.filterwarnings("ignore", "In the future, 'NAT == x'")
flag_array = (arr1 == arr2) | isnull(arr1) | isnull(arr2)
return bool(flag_array.all())
else:
return lazy_equiv


def count(data, axis=None):
Expand Down
17 changes: 13 additions & 4 deletions xarray/core/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@

from . import dtypes, pdcompat
from .alignment import deep_align
from .duck_array_ops import lazy_array_equiv
from .utils import Frozen, dict_equiv
from .variable import Variable, as_variable, assert_unique_multiindex_level_names

Expand Down Expand Up @@ -123,16 +124,24 @@ def unique_variable(
combine_method = "fillna"

if equals is None:
out = out.compute()
# first check without comparing values i.e. no computes
for var in variables[1:]:
equals = getattr(out, compat)(var)
equals = getattr(out, compat)(var, equiv=lazy_array_equiv)
if not equals:
break

# now compare values with minimum number of computes
if not equals:
out = out.compute()
for var in variables[1:]:
equals = getattr(out, compat)(var)
if not equals:
break

if not equals:
raise MergeError(
"conflicting values for variable {!r} on objects to be combined. "
"You can skip this check by specifying compat='override'.".format(name)
f"conflicting values for variable {name!r} on objects to be combined. "
"You can skip this check by specifying compat='override'."
)

if combine_method:
Expand Down
14 changes: 9 additions & 5 deletions xarray/core/variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -1231,7 +1231,9 @@ def transpose(self, *dims) -> "Variable":
dims = self.dims[::-1]
dims = tuple(infix_dims(dims, self.dims))
axes = self.get_axis_num(dims)
if len(dims) < 2: # no need to transpose if only one dimension
if len(dims) < 2 or dims == self.dims:
# no need to transpose if only one dimension
# or dims are in same order
return self.copy(deep=False)

data = as_indexable(self._data).transpose(axes)
Expand Down Expand Up @@ -1590,22 +1592,24 @@ def broadcast_equals(self, other, equiv=duck_array_ops.array_equiv):
return False
return self.equals(other, equiv=equiv)

def identical(self, other):
def identical(self, other, equiv=duck_array_ops.array_equiv):
"""Like equals, but also checks attributes.
"""
try:
return utils.dict_equiv(self.attrs, other.attrs) and self.equals(other)
return utils.dict_equiv(self.attrs, other.attrs) and self.equals(
other, equiv=equiv
)
except (TypeError, AttributeError):
return False

def no_conflicts(self, other):
def no_conflicts(self, other, equiv=duck_array_ops.array_notnull_equiv):
"""True if the intersection of two Variable's non-null data is
equal; otherwise false.
Variables can thus still be equal if there are locations where either,
or both, contain NaN values.
"""
return self.broadcast_equals(other, equiv=duck_array_ops.array_notnull_equiv)
return self.broadcast_equals(other, equiv=equiv)

def quantile(self, q, dim=None, interpolation="linear", keep_attrs=None):
"""Compute the qth quantile of the data along the specified dimension.
Expand Down
88 changes: 87 additions & 1 deletion xarray/tests/test_dask.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
assert_identical,
raises_regex,
)
from ..core.duck_array_ops import lazy_array_equiv

dask = pytest.importorskip("dask")
da = pytest.importorskip("dask.array")
Expand Down Expand Up @@ -423,7 +424,53 @@ def test_concat_loads_variables(self):
out.compute()
assert kernel_call_count == 24

# Finally, test that riginals are unaltered
# Finally, test that originals are unaltered
assert ds1["d"].data is d1
assert ds1["c"].data is c1
assert ds2["d"].data is d2
assert ds2["c"].data is c2
assert ds3["d"].data is d3
assert ds3["c"].data is c3

# now check that concat() is correctly using dask name equality to skip loads
out = xr.concat(
[ds1, ds1, ds1], dim="n", data_vars="different", coords="different"
)
assert kernel_call_count == 24
# variables are not loaded in the output
assert isinstance(out["d"].data, dask.array.Array)
assert isinstance(out["c"].data, dask.array.Array)

out = xr.concat(
[ds1, ds1, ds1], dim="n", data_vars=[], coords=[], compat="identical"
)
assert kernel_call_count == 24
# variables are not loaded in the output
assert isinstance(out["d"].data, dask.array.Array)
assert isinstance(out["c"].data, dask.array.Array)

out = xr.concat(
[ds1, ds2.compute(), ds3],
dim="n",
data_vars="all",
coords="different",
compat="identical",
)
# c1,c3 must be computed for comparison since c2 is numpy;
# d2 is computed too
assert kernel_call_count == 28

out = xr.concat(
[ds1, ds2.compute(), ds3],
dim="n",
data_vars="all",
coords="all",
compat="identical",
)
# no extra computes
assert kernel_call_count == 30

# Finally, test that originals are unaltered
assert ds1["d"].data is d1
assert ds1["c"].data is c1
assert ds2["d"].data is d2
Expand Down Expand Up @@ -1135,3 +1182,42 @@ def test_make_meta(map_ds):
for variable in map_ds.data_vars:
assert variable in meta.data_vars
assert meta.data_vars[variable].shape == (0,) * meta.data_vars[variable].ndim


def test_identical_coords_no_computes():
lons2 = xr.DataArray(da.zeros((10, 10), chunks=2), dims=("y", "x"))
a = xr.DataArray(
da.zeros((10, 10), chunks=2), dims=("y", "x"), coords={"lons": lons2}
)
b = xr.DataArray(
da.zeros((10, 10), chunks=2), dims=("y", "x"), coords={"lons": lons2}
)
with raise_if_dask_computes():
c = a + b
assert_identical(c, a)


def test_lazy_array_equiv():
lons1 = xr.DataArray(da.zeros((10, 10), chunks=2), dims=("y", "x"))
lons2 = xr.DataArray(da.zeros((10, 10), chunks=2), dims=("y", "x"))
var1 = lons1.variable
var2 = lons2.variable
with raise_if_dask_computes():
lons1.equals(lons2)
with raise_if_dask_computes():
var1.equals(var2 / 2, equiv=lazy_array_equiv)
assert var1.equals(var2.compute(), equiv=lazy_array_equiv) is None
assert var1.compute().equals(var2.compute(), equiv=lazy_array_equiv) is None

with raise_if_dask_computes():
assert lons1.equals(lons1.transpose("y", "x"))

with raise_if_dask_computes():
for compat in [
"broadcast_equals",
"equals",
"override",
"identical",
"no_conflicts",
]:
xr.merge([lons1, lons2], compat=compat)

0 comments on commit 705e656

Please sign in to comment.