
Commit

Closes pydata#3409
Guido Imperiale committed Nov 15, 2019
1 parent 7b4a286 commit be333e4
Showing 3 changed files with 40 additions and 4 deletions.
2 changes: 1 addition & 1 deletion ci/azure/install.yml
@@ -16,9 +16,9 @@ steps:
--pre \
--upgrade \
matplotlib \
numpy \
pandas \
scipy
# numpy \ # FIXME https://github.com/pydata/xarray/issues/3409
pip install \
--no-deps \
--upgrade \
7 changes: 6 additions & 1 deletion doc/whats-new.rst
@@ -100,6 +100,12 @@ Bug fixes
(:issue:`3402`). By `Deepak Cherian <https://github.com/dcherian/>`_
- Allow appending datetime and bool data variables to zarr stores.
(:issue:`3480`). By `Akihiro Matsukawa <https://github.com/amatsukawa/>`_.
- Add support for numpy >=1.18 (:issue:`3409`).
By `Guido Imperiale <https://github.com/crusaderky>`_.
- Add support for pandas >=0.26 (:issue:`3440`).
By `Deepak Cherian <https://github.com/dcherian>`_.
- Add support for pseudonetcdf >=3.1 (:pull:`3485`).
By `Barron Henderson <https://github.com/barronh>`_.

Documentation
~~~~~~~~~~~~~
@@ -118,7 +124,6 @@ Documentation

Internal Changes
~~~~~~~~~~~~~~~~

- Added integration tests against `pint <https://pint.readthedocs.io/>`_.
(:pull:`3238`, :pull:`3447`, :pull:`3493`, :pull:`3508`)
by `Justus Magin <https://github.com/keewis>`_.
35 changes: 33 additions & 2 deletions xarray/core/duck_array_ops.py
@@ -10,6 +10,7 @@

import numpy as np
import pandas as pd
from distutils.version import LooseVersion

from . import dask_array_ops, dtypes, npcompat, nputils
from .nputils import nanfirst, nanlast
@@ -351,6 +352,32 @@ def f(values, axis=None, skipna=None, **kwargs):
_mean = _create_nan_agg_method("mean")


def _datetime_crude_nanmin(array):
"""Implement nanmin for datetime64 arrays, with caveats:
- can't accept an axis parameter
- will return incorrect results if the array exclusively contains NaT
"""
if LooseVersion(np.__version__) < "1.18":
# numpy.min < 1.18 incorrectly skips NaT - which we exploit here
return min(array, skipna=False)
# This requires numpy >= 1.15

from .dataarray import DataArray
from .variable import Variable

if isinstance(array, (DataArray, Variable)):
array = array.data
array = array.ravel()
array = array[~pandas_isnull(array)]

assert array.dtype.kind in "Mm"
assert array.dtype.itemsize == 8
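# NaT is stored as the smallest possible int64 value, so the largest possible
# value is a safe identity for min() when the NaT-filtered array is empty
# (hence the all-NaT caveat in the docstring).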
initial = np.array(2 ** 63 - 1, dtype=array.dtype)

return min(array, initial=initial, skipna=False)


def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):
"""Convert an array containing datetime-like data to an array of floats.
@@ -370,7 +397,10 @@ def datetime_to_numeric(array, offset=None, datetime_unit=None, dtype=float):
"""
# TODO: make this function dask-compatible?
if offset is None:
offset = array.min()
if array.dtype.kind in "Mm":
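# On numpy >= 1.18, a plain min() would return NaT whenever any element is
# NaT, and subtracting a NaT offset below would turn every element into NaT.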
offset = _datetime_crude_nanmin(array)
else:
offset = min(array)
array = array - offset

if not hasattr(array, "dtype"): # scalar is converted to 0d-array
@@ -401,7 +431,8 @@ def mean(array, axis=None, skipna=None, **kwargs):

array = asarray(array)
if array.dtype.kind in "Mm":
offset = min(array)
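# As in datetime_to_numeric(), the offset must not be NaT; otherwise every
# value measured relative to it would itself become NaT.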
offset = _datetime_crude_nanmin(array)

# xarray always uses np.datetime64[ns] for np.datetime64 data
dtype = "timedelta64[ns]"
return (

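For context, here is a minimal sketch (not part of the commit) of the numpy behaviour that _datetime_crude_nanmin works around; the sample timestamps below are made up for illustration:

import numpy as np

# Hypothetical sample data; xarray stores datetimes with nanosecond precision.
times = np.array(["2000-01-01", "NaT", "2000-01-03"], dtype="datetime64[ns]")

# np.nanmin() did not support datetime64 at the time of this commit, and
# np.min() changed behaviour in 1.18: older releases silently skip NaT,
# newer ones propagate it.
print(np.min(times))  # numpy < 1.18: 2000-01-01; numpy >= 1.18: NaT

# A NaT minimum would break datetime_to_numeric(): subtracting NaT from a
# datetime64 array yields NaT everywhere, destroying the values the function
# is trying to convert.
print(times - np.datetime64("NaT", "ns"))  # [NaT NaT NaT]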