Skip to content
forked from pydata/xarray

Commit

Permalink
Fix generator for aggregations
Browse files Browse the repository at this point in the history
  • Loading branch information
dcherian committed Dec 22, 2023
1 parent 97f1695 commit d6a3f2d
Show file tree
Hide file tree
Showing 2 changed files with 92 additions and 14 deletions.
80 changes: 66 additions & 14 deletions xarray/core/_aggregations.py
Original file line number Diff line number Diff line change
Expand Up @@ -2315,6 +2315,19 @@ def cumprod(
class DatasetGroupByAggregations:
_obj: Dataset

def _reduce_without_squeeze_warn(
    self,
    func: Callable[..., Any],
    dim: Dims = None,
    *,
    axis: int | Sequence[int] | None = None,
    keep_attrs: bool | None = None,
    keepdims: bool = False,
    shortcut: bool = True,
    **kwargs: Any,
) -> Dataset:
    """Reduce grouped data with ``func`` without emitting the groupby
    ``squeeze`` deprecation warning.

    This is an abstract hook on the generated aggregations mixin: it is
    unconditionally unimplemented here and must be overridden by the
    concrete class that mixes this in (presumably the Dataset GroupBy
    implementation — confirm against the subclass). The generated
    aggregation methods (``median`` etc., visible below) dispatch
    through it.

    Parameters mirror ``reduce``: ``func`` is the reduction callable,
    ``dim`` the dimension(s) to reduce over, ``axis``/``keep_attrs``/
    ``keepdims`` the usual reduction options, ``shortcut`` a
    groupby-specific fast path toggle, and ``**kwargs`` is forwarded to
    ``func``.

    Raises
    ------
    NotImplementedError
        Always, when called on this base mixin.
    """
    raise NotImplementedError()

def reduce(
self,
func: Callable[..., Any],
Expand Down Expand Up @@ -3829,6 +3842,19 @@ def cumprod(
class DatasetResampleAggregations:
_obj: Dataset

def _reduce_without_squeeze_warn(
    self,
    func: Callable[..., Any],
    dim: Dims = None,
    *,
    axis: int | Sequence[int] | None = None,
    keep_attrs: bool | None = None,
    keepdims: bool = False,
    shortcut: bool = True,
    **kwargs: Any,
) -> Dataset:
    """Reduce resampled data with ``func`` without emitting the groupby
    ``squeeze`` deprecation warning.

    Abstract hook on the generated resample-aggregations mixin:
    unconditionally unimplemented here and must be overridden by the
    concrete class that mixes this in (presumably the Dataset Resample
    implementation — confirm against the subclass). The generated
    aggregation methods dispatch through it.

    Parameters mirror ``reduce``: ``func`` is the reduction callable,
    ``dim`` the dimension(s) to reduce over, ``axis``/``keep_attrs``/
    ``keepdims`` the usual reduction options, ``shortcut`` a
    groupby-specific fast path toggle, and ``**kwargs`` is forwarded to
    ``func``.

    Raises
    ------
    NotImplementedError
        Always, when called on this base mixin.
    """
    raise NotImplementedError()

def reduce(
self,
func: Callable[..., Any],
Expand Down Expand Up @@ -5112,21 +5138,21 @@ def median(
>>> ds.resample(time="3M").median()
<xarray.Dataset>
Dimensions: (__resample_dim__: 3)
Dimensions: (time: 3)
Coordinates:
* __resample_dim__ (__resample_dim__) datetime64[ns] 2001-01-31 ... 2001-0...
* time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31
Data variables:
da (__resample_dim__) float64 1.0 2.0 2.0
da (time) float64 1.0 2.0 2.0
Use ``skipna`` to control whether NaNs are ignored.
>>> ds.resample(time="3M").median(skipna=False)
<xarray.Dataset>
Dimensions: (__resample_dim__: 3)
Dimensions: (time: 3)
Coordinates:
* __resample_dim__ (__resample_dim__) datetime64[ns] 2001-01-31 ... 2001-0...
* time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31
Data variables:
da (__resample_dim__) float64 1.0 2.0 nan
da (time) float64 1.0 2.0 nan
"""
return self._reduce_without_squeeze_warn(
duck_array_ops.median,
Expand Down Expand Up @@ -5343,6 +5369,19 @@ def cumprod(
class DataArrayGroupByAggregations:
_obj: DataArray

def _reduce_without_squeeze_warn(
    self,
    func: Callable[..., Any],
    dim: Dims = None,
    *,
    axis: int | Sequence[int] | None = None,
    keep_attrs: bool | None = None,
    keepdims: bool = False,
    shortcut: bool = True,
    **kwargs: Any,
) -> DataArray:
    """Reduce grouped data with ``func`` without emitting the groupby
    ``squeeze`` deprecation warning.

    Abstract hook on the generated aggregations mixin: unconditionally
    unimplemented here and must be overridden by the concrete class that
    mixes this in (presumably the DataArray GroupBy implementation —
    confirm against the subclass). The generated aggregation methods
    (``median``/``cumsum``/``cumprod`` etc., visible below) dispatch
    through it.

    Parameters mirror ``reduce``: ``func`` is the reduction callable,
    ``dim`` the dimension(s) to reduce over, ``axis``/``keep_attrs``/
    ``keepdims`` the usual reduction options, ``shortcut`` a
    groupby-specific fast path toggle, and ``**kwargs`` is forwarded to
    ``func``.

    Raises
    ------
    NotImplementedError
        Always, when called on this base mixin.
    """
    raise NotImplementedError()

def reduce(
self,
func: Callable[..., Any],
Expand Down Expand Up @@ -6749,6 +6788,19 @@ def cumprod(
class DataArrayResampleAggregations:
_obj: DataArray

def _reduce_without_squeeze_warn(
    self,
    func: Callable[..., Any],
    dim: Dims = None,
    *,
    axis: int | Sequence[int] | None = None,
    keep_attrs: bool | None = None,
    keepdims: bool = False,
    shortcut: bool = True,
    **kwargs: Any,
) -> DataArray:
    """Reduce resampled data with ``func`` without emitting the groupby
    ``squeeze`` deprecation warning.

    Abstract hook on the generated resample-aggregations mixin:
    unconditionally unimplemented here and must be overridden by the
    concrete class that mixes this in (presumably the DataArray Resample
    implementation — confirm against the subclass). The generated
    aggregation methods (``median``/``cumsum``/``cumprod``, visible
    below) dispatch through it.

    Parameters mirror ``reduce``: ``func`` is the reduction callable,
    ``dim`` the dimension(s) to reduce over, ``axis``/``keep_attrs``/
    ``keepdims`` the usual reduction options, ``shortcut`` a
    groupby-specific fast path toggle, and ``**kwargs`` is forwarded to
    ``func``.

    Raises
    ------
    NotImplementedError
        Always, when called on this base mixin.
    """
    raise NotImplementedError()

def reduce(
self,
func: Callable[..., Any],
Expand Down Expand Up @@ -7936,18 +7988,18 @@ def median(
labels (time) <U1 'a' 'b' 'c' 'c' 'b' 'a'
>>> da.resample(time="3M").median()
<xarray.DataArray (__resample_dim__: 3)>
<xarray.DataArray (time: 3)>
array([1., 2., 2.])
Coordinates:
* __resample_dim__ (__resample_dim__) datetime64[ns] 2001-01-31 ... 2001-0...
* time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31
Use ``skipna`` to control whether NaNs are ignored.
>>> da.resample(time="3M").median(skipna=False)
<xarray.DataArray (__resample_dim__: 3)>
<xarray.DataArray (time: 3)>
array([ 1., 2., nan])
Coordinates:
* __resample_dim__ (__resample_dim__) datetime64[ns] 2001-01-31 ... 2001-0...
* time (time) datetime64[ns] 2001-01-31 2001-04-30 2001-07-31
"""
return self._reduce_without_squeeze_warn(
duck_array_ops.median,
Expand Down Expand Up @@ -8034,17 +8086,17 @@ def cumsum(
<xarray.DataArray (time: 6)>
array([1., 2., 5., 5., 2., 2.])
Coordinates:
* time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30
labels (time) <U1 'a' 'b' 'c' 'c' 'b' 'a'
Dimensions without coordinates: time
Use ``skipna`` to control whether NaNs are ignored.
>>> da.resample(time="3M").cumsum(skipna=False)
<xarray.DataArray (time: 6)>
array([ 1., 2., 5., 5., 2., nan])
Coordinates:
* time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30
labels (time) <U1 'a' 'b' 'c' 'c' 'b' 'a'
Dimensions without coordinates: time
"""
return self._reduce_without_squeeze_warn(
duck_array_ops.cumsum,
Expand Down Expand Up @@ -8131,17 +8183,17 @@ def cumprod(
<xarray.DataArray (time: 6)>
array([1., 2., 6., 0., 2., 2.])
Coordinates:
* time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30
labels (time) <U1 'a' 'b' 'c' 'c' 'b' 'a'
Dimensions without coordinates: time
Use ``skipna`` to control whether NaNs are ignored.
>>> da.resample(time="3M").cumprod(skipna=False)
<xarray.DataArray (time: 6)>
array([ 1., 2., 6., 0., 2., nan])
Coordinates:
* time (time) datetime64[ns] 2001-01-31 2001-02-28 ... 2001-06-30
labels (time) <U1 'a' 'b' 'c' 'c' 'b' 'a'
Dimensions without coordinates: time
"""
return self._reduce_without_squeeze_warn(
duck_array_ops.cumprod,
Expand Down
26 changes: 26 additions & 0 deletions xarray/util/generate_aggregations.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,19 @@ def reduce(
class {obj}{cls}Aggregations:
_obj: {obj}
def _reduce_without_squeeze_warn(
self,
func: Callable[..., Any],
dim: Dims = None,
*,
axis: int | Sequence[int] | None = None,
keep_attrs: bool | None = None,
keepdims: bool = False,
shortcut: bool = True,
**kwargs: Any,
) -> {obj}:
raise NotImplementedError()
def reduce(
self,
func: Callable[..., Any],
Expand All @@ -113,6 +126,19 @@ def _flox_reduce(
class {obj}{cls}Aggregations:
_obj: {obj}
def _reduce_without_squeeze_warn(
self,
func: Callable[..., Any],
dim: Dims = None,
*,
axis: int | Sequence[int] | None = None,
keep_attrs: bool | None = None,
keepdims: bool = False,
shortcut: bool = True,
**kwargs: Any,
) -> {obj}:
raise NotImplementedError()
def reduce(
self,
func: Callable[..., Any],
Expand Down

0 comments on commit d6a3f2d

Please sign in to comment.