forked from pydata/xarray

Commit

cleanup more
dcherian committed Jun 21, 2024
1 parent 2649f76 commit 6c60cf7
Showing 2 changed files with 16 additions and 15 deletions.
29 changes: 15 additions & 14 deletions xarray/core/groupby.py
@@ -19,8 +19,10 @@
 from xarray.core.arithmetic import DataArrayGroupbyArithmetic, DatasetGroupbyArithmetic
 from xarray.core.common import ImplementsArrayReduce, ImplementsDatasetReduce
 from xarray.core.concat import concat
+from xarray.core.coordinates import Coordinates
 from xarray.core.formatting import format_array_flat
 from xarray.core.indexes import (
+    PandasIndex,
     create_default_index_implicit,
     filter_indexes_from_coords,
 )
@@ -650,24 +652,20 @@ def _binary_op(self, other, f, reflexive=False):
 
         (grouper,) = self.groupers
         obj = self._original_obj
+        name = grouper.name
         group = grouper.group
         codes = self._codes
         dims = group.dims
 
-        if isinstance(grouper.group, _DummyGroup):
+        if isinstance(group, _DummyGroup):
             group = coord = group.to_dataarray()
         else:
            coord = grouper.unique_coord
            if isinstance(coord, Variable):
                assert coord.ndim == 1
                (coord_dim,) = coord.dims
                # TODO: explicitly create Index here
-                coord = DataArray(
-                    dims=coord_dim,
-                    data=coord.data,
-                    attrs=coord.attrs,
-                    coords={coord_dim: coord.data},
-                )
+                coord = DataArray(coord, coords={coord_dim: coord.data})
         name = grouper.name
 
         if not isinstance(other, (Dataset, DataArray)):
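For context (not part of the commit): the new one-liner works because DataArray reuses the dims, data and attrs of a Variable passed as its data argument, which makes the explicit dims=/data=/attrs= keywords redundant. A minimal sketch, using a hypothetical 1-D Variable in place of grouper.unique_coord:

import numpy as np
import xarray as xr

# Hypothetical stand-in for grouper.unique_coord.
coord = xr.Variable(dims="letters", data=np.array(["a", "b", "c"]), attrs={"role": "labels"})
(coord_dim,) = coord.dims

# Passing the Variable directly lets DataArray pick up its dims, data and attrs,
# so only the coordinate values need to be supplied.
da = xr.DataArray(coord, coords={coord_dim: coord.data})
print(da.dims, da.attrs)  # ('letters',) {'role': 'labels'}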
@@ -783,6 +781,7 @@ def _flox_reduce(
 
         obj = self._original_obj
         (grouper,) = self.groupers
+        name = grouper.name
         isbin = isinstance(grouper.grouper, BinGrouper)
 
         if keep_attrs is None:
@@ -814,14 +813,14 @@
         # weird backcompat
         # reducing along a unique indexed dimension with squeeze=True
         # should raise an error
-        if (dim is None or dim == grouper.name) and grouper.name in obj.xindexes:
-            index = obj.indexes[grouper.name]
+        if (dim is None or dim == name) and grouper.name in obj.xindexes:
+            index = obj.indexes[name]
             if index.is_unique and self._squeeze:
-                raise ValueError(f"cannot reduce over dimensions {grouper.name!r}")
+                raise ValueError(f"cannot reduce over dimensions {name!r}")
 
         unindexed_dims: tuple[Hashable, ...] = tuple()
         if isinstance(grouper.group, _DummyGroup) and not isbin:
-            unindexed_dims = (grouper.name,)
+            unindexed_dims = (name,)
 
         parsed_dim: tuple[Hashable, ...]
         if isinstance(dim, str):
@@ -865,10 +864,12 @@
         # in the grouped variable
         group_dims = grouper.group.dims
         if set(group_dims).issubset(set(parsed_dim)):
-            new_coord = Variable(
-                dims=grouper.name, data=np.array(output_index), attrs=self._codes.attrs
+            result = result.assign_coords(
+                Coordinates(
+                    coords={name: (name, np.array(output_index))},
+                    indexes={name: PandasIndex(output_index, dim=name)},
+                )
             )
-            result = result.assign_coords({grouper.name: new_coord})
         result = result.drop_vars(unindexed_dims)
 
         # broadcast and restore non-numeric data variables (backcompat)
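The assign_coords change above attaches the output coordinate together with an explicit PandasIndex, instead of assigning a bare Variable and relying on implicit index creation. A minimal sketch of the same pattern using the public counterparts (xr.Coordinates and xarray.indexes.PandasIndex); the names name, output_index and result below are hypothetical stand-ins, not code from the commit:

import numpy as np
import pandas as pd
import xarray as xr
from xarray.indexes import PandasIndex

# Hypothetical stand-ins for the names used in _flox_reduce.
name = "labels"
output_index = pd.Index(["a", "b", "c"])
result = xr.Dataset({"counts": (name, np.array([4, 2, 7]))})

# Building a Coordinates object lets us pass the index explicitly
# alongside the coordinate variable.
result = result.assign_coords(
    xr.Coordinates(
        coords={name: xr.Variable(dims=name, data=np.asarray(output_index))},
        indexes={name: PandasIndex(output_index, dim=name)},
    )
)
print(result.xindexes)  # the "labels" coordinate now carries a PandasIndex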
2 changes: 1 addition & 1 deletion xarray/core/groupers.py
@@ -166,7 +166,7 @@ def _factorize_dummy(self) -> EncodedGroups:
             full_index = pd.RangeIndex(self.group.size)
         else:
             codes = self.group.copy(data=size_range)
-            unique_coord = self.group.variable
+            unique_coord = self.group.variable.to_base_variable()
             full_index = pd.Index(unique_coord.data)
 
         return EncodedGroups(
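For reference (not part of the commit): to_base_variable() converts an IndexVariable, the wrapper xarray uses for indexed coordinates, into a plain Variable with the same dims, data and attrs, which is presumably why it is applied to self.group.variable here. A small sketch:

import xarray as xr

ds = xr.Dataset(coords={"x": [10, 20, 30]})

idx_var = ds["x"].variable              # IndexVariable backing the "x" coordinate
base_var = idx_var.to_base_variable()   # plain Variable with the same dims/data/attrs

print(type(idx_var).__name__)   # IndexVariable
print(type(base_var).__name__)  # Variable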
