Skip to content
forked from pydata/xarray

Commit

Permalink
Merge branch 'main' into groupby-save-codes-new
Browse files Browse the repository at this point in the history
* main:
  absolufy-imports - No relative imports - PEP8 (pydata#7204)
  [skip-ci] whats-new for dev (pydata#7351)
  Whats-new: 2022.12.0 (pydata#7345)
  Fix assign_coords resetting all dimension coords to default index (pydata#7347)
  • Loading branch information
dcherian committed Dec 8, 2022
2 parents 1ca5b90 + 6e77f5e commit 284cff3
Show file tree
Hide file tree
Showing 121 changed files with 961 additions and 817 deletions.
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,11 @@ repos:
- id: check-yaml
- id: debug-statements
- id: mixed-line-ending
  - repo: https://github.com/MarcoGorelli/absolufy-imports
rev: v0.3.1
hooks:
- id: absolufy-imports
name: absolufy-imports
# This wants to go before isort & flake8
  - repo: https://github.com/PyCQA/autoflake
rev: "v2.0.0"
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/dataarray_missing.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask


def make_bench_data(shape, frac_nan, chunks):
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/dataset_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@
import pandas as pd

import xarray as xr

from . import _skip_slow, randint, randn, requires_dask
from asv_bench.benchmarks import _skip_slow, randint, randn, requires_dask

try:
import dask
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/groupby.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import _skip_slow, parameterized, requires_dask
from asv_bench.benchmarks import _skip_slow, parameterized, requires_dask


class GroupBy:
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/indexing.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randint, randn, requires_dask
from asv_bench.benchmarks import parameterized, randint, randn, requires_dask

nx = 2000
ny = 1000
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/interp.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

nx = 1500
ny = 1000
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/pandas.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized
from asv_bench.benchmarks import parameterized


class MultiIndexSeries:
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/polyfit.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import numpy as np

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

NDEGS = (2, 5, 20)
NX = (10**2, 10**6)
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/reindexing.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
import numpy as np

import xarray as xr

from . import requires_dask
from asv_bench.benchmarks import requires_dask

ntime = 500
nx = 50
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/rolling.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

nx = 300
long_nx = 30000
Expand Down
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/unstacking.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import requires_dask, requires_sparse
from asv_bench.benchmarks import requires_dask, requires_sparse


class Unstacking:
Expand Down
45 changes: 42 additions & 3 deletions doc/whats-new.rst
Original file line number Diff line number Diff line change
Expand Up @@ -14,11 +14,48 @@ What's New
np.random.seed(123456)
.. _whats-new.2022.11.1:
v2022.11.1 (unreleased)
.. _whats-new.2022.12.1:

v2022.12.1 (unreleased)
-----------------------

New Features
~~~~~~~~~~~~


Breaking changes
~~~~~~~~~~~~~~~~


Deprecations
~~~~~~~~~~~~


Bug fixes
~~~~~~~~~


Documentation
~~~~~~~~~~~~~


Internal Changes
~~~~~~~~~~~~~~~~
- Add the pre-commit hook `absolufy-imports` to convert relative xarray imports to
absolute imports (:pull:`7204`).
  By `Jimmy Westling <https://github.com/illviljan>`_.

.. _whats-new.2022.12.0:

v2022.12.0 (2022 Dec 2)
-----------------------

This release includes a number of bug fixes and experimental support for Zarr V3.
Thanks to the 16 contributors to this release:
Deepak Cherian, Francesco Zanetta, Gregory Lee, Illviljan, Joe Hamman, Justus Magin, Luke Conibear, Mark Harfouche, Mathias Hauser,
Mick, Mike Taves, Sam Levang, Spencer Clark, Tom Nicholas, Wei Ji, templiert

New Features
~~~~~~~~~~~~
- Enable using `offset` and `origin` arguments in :py:meth:`DataArray.resample`
Expand Down Expand Up @@ -65,6 +102,8 @@ Bug fixes
  By `Michael Niklas <https://github.com/headtr1ck>`_.
- Fix multiple reads on fsspec S3 files by resetting file pointer to 0 when reading file streams (:issue:`6813`, :pull:`7304`).
  By `David Hoese <https://github.com/djhoese>`_ and `Wei Ji Leong <https://github.com/weiji14>`_.
- Fix :py:meth:`Dataset.assign_coords` resetting all dimension coordinates to default (pandas) index (:issue:`7346`, :pull:`7347`).
  By `Benoît Bovy <https://github.com/benbovy>`_.

Documentation
~~~~~~~~~~~~~
Expand Down Expand Up @@ -2601,7 +2640,7 @@ Breaking changes
have removed the internal use of the ``OrderedDict`` in favor of Python's builtin
``dict`` object which is now ordered itself. This change will be most obvious when
interacting with the ``attrs`` property on Dataset and DataArray objects.
(:issue:`3380`, :pull:`3389`). HBy `Joeamman <https://github.com/jhamman>`_.
(:issue:`3380`, :pull:`3389`). By `Joe Hamman <https://github.com/jhamman>`_.

New functions/methods
~~~~~~~~~~~~~~~~~~~~~
Expand Down
45 changes: 24 additions & 21 deletions xarray/__init__.py
Original file line number Diff line number Diff line change
@@ -1,22 +1,22 @@
from . import testing, tutorial
from .backends.api import (
from xarray import testing, tutorial
from xarray.backends.api import (
load_dataarray,
load_dataset,
open_dataarray,
open_dataset,
open_mfdataset,
save_mfdataset,
)
from .backends.rasterio_ import open_rasterio
from .backends.zarr import open_zarr
from .coding.cftime_offsets import cftime_range, date_range, date_range_like
from .coding.cftimeindex import CFTimeIndex
from .coding.frequencies import infer_freq
from .conventions import SerializationWarning, decode_cf
from .core.alignment import align, broadcast
from .core.combine import combine_by_coords, combine_nested
from .core.common import ALL_DIMS, full_like, ones_like, zeros_like
from .core.computation import (
from xarray.backends.rasterio_ import open_rasterio
from xarray.backends.zarr import open_zarr
from xarray.coding.cftime_offsets import cftime_range, date_range, date_range_like
from xarray.coding.cftimeindex import CFTimeIndex
from xarray.coding.frequencies import infer_freq
from xarray.conventions import SerializationWarning, decode_cf
from xarray.core.alignment import align, broadcast
from xarray.core.combine import combine_by_coords, combine_nested
from xarray.core.common import ALL_DIMS, full_like, ones_like, zeros_like
from xarray.core.computation import (
apply_ufunc,
corr,
cov,
Expand All @@ -26,15 +26,18 @@
unify_chunks,
where,
)
from .core.concat import concat
from .core.dataarray import DataArray
from .core.dataset import Dataset
from .core.extensions import register_dataarray_accessor, register_dataset_accessor
from .core.merge import Context, MergeError, merge
from .core.options import get_options, set_options
from .core.parallel import map_blocks
from .core.variable import Coordinate, IndexVariable, Variable, as_variable
from .util.print_versions import show_versions
from xarray.core.concat import concat
from xarray.core.dataarray import DataArray
from xarray.core.dataset import Dataset
from xarray.core.extensions import (
register_dataarray_accessor,
register_dataset_accessor,
)
from xarray.core.merge import Context, MergeError, merge
from xarray.core.options import get_options, set_options
from xarray.core.parallel import map_blocks
from xarray.core.variable import Coordinate, IndexVariable, Variable, as_variable
from xarray.util.print_versions import show_versions

try:
from importlib.metadata import version as _version
Expand Down
33 changes: 20 additions & 13 deletions xarray/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,19 +3,26 @@
DataStores provide a uniform interface for saving and loading data in different
formats. They should not be used directly, but rather through Dataset objects.
"""
from .cfgrib_ import CfGribDataStore
from .common import AbstractDataStore, BackendArray, BackendEntrypoint
from .file_manager import CachingFileManager, DummyFileManager, FileManager
from .h5netcdf_ import H5netcdfBackendEntrypoint, H5NetCDFStore
from .memory import InMemoryDataStore
from .netCDF4_ import NetCDF4BackendEntrypoint, NetCDF4DataStore
from .plugins import list_engines
from .pseudonetcdf_ import PseudoNetCDFBackendEntrypoint, PseudoNetCDFDataStore
from .pydap_ import PydapBackendEntrypoint, PydapDataStore
from .pynio_ import NioDataStore
from .scipy_ import ScipyBackendEntrypoint, ScipyDataStore
from .store import StoreBackendEntrypoint
from .zarr import ZarrBackendEntrypoint, ZarrStore
from xarray.backends.cfgrib_ import CfGribDataStore
from xarray.backends.common import AbstractDataStore, BackendArray, BackendEntrypoint
from xarray.backends.file_manager import (
CachingFileManager,
DummyFileManager,
FileManager,
)
from xarray.backends.h5netcdf_ import H5netcdfBackendEntrypoint, H5NetCDFStore
from xarray.backends.memory import InMemoryDataStore
from xarray.backends.netCDF4_ import NetCDF4BackendEntrypoint, NetCDF4DataStore
from xarray.backends.plugins import list_engines
from xarray.backends.pseudonetcdf_ import (
PseudoNetCDFBackendEntrypoint,
PseudoNetCDFDataStore,
)
from xarray.backends.pydap_ import PydapBackendEntrypoint, PydapDataStore
from xarray.backends.pynio_ import NioDataStore
from xarray.backends.scipy_ import ScipyBackendEntrypoint, ScipyDataStore
from xarray.backends.store import StoreBackendEntrypoint
from xarray.backends.zarr import ZarrBackendEntrypoint, ZarrStore

__all__ = [
"AbstractDataStore",
Expand Down
24 changes: 12 additions & 12 deletions xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,20 +25,20 @@

import numpy as np

from .. import backends, conventions
from ..core import indexing
from ..core.combine import (
from xarray import backends, conventions
from xarray.backends import plugins
from xarray.backends.common import AbstractDataStore, ArrayWriter, _normalize_path
from xarray.backends.locks import _get_scheduler
from xarray.core import indexing
from xarray.core.combine import (
_infer_concat_order_from_positions,
_nested_combine,
combine_by_coords,
)
from ..core.dataarray import DataArray
from ..core.dataset import Dataset, _get_chunk, _maybe_chunk
from ..core.indexes import Index
from ..core.utils import is_remote_uri
from . import plugins
from .common import AbstractDataStore, ArrayWriter, _normalize_path
from .locks import _get_scheduler
from xarray.core.dataarray import DataArray
from xarray.core.dataset import Dataset, _get_chunk, _maybe_chunk
from xarray.core.indexes import Index
from xarray.core.utils import is_remote_uri

if TYPE_CHECKING:
try:
Expand All @@ -47,13 +47,13 @@
Delayed = None # type: ignore
from io import BufferedIOBase

from ..core.types import (
from xarray.backends.common import BackendEntrypoint
from xarray.core.types import (
CombineAttrsOptions,
CompatOptions,
JoinOptions,
NestedSequence,
)
from .common import BackendEntrypoint

T_NetcdfEngine = Literal["netcdf4", "scipy", "h5netcdf"]
T_Engine = Union[
Expand Down
12 changes: 6 additions & 6 deletions xarray/backends/cfgrib_.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,18 +5,18 @@

import numpy as np

from ..core import indexing
from ..core.utils import Frozen, FrozenDict, close_on_error, module_available
from ..core.variable import Variable
from .common import (
from xarray.backends.common import (
BACKEND_ENTRYPOINTS,
AbstractDataStore,
BackendArray,
BackendEntrypoint,
_normalize_path,
)
from .locks import SerializableLock, ensure_lock
from .store import StoreBackendEntrypoint
from xarray.backends.locks import SerializableLock, ensure_lock
from xarray.backends.store import StoreBackendEntrypoint
from xarray.core import indexing
from xarray.core.utils import Frozen, FrozenDict, close_on_error, module_available
from xarray.core.variable import Variable

# FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
# in most circumstances. See:
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@

import numpy as np

from ..conventions import cf_encoder
from ..core import indexing
from ..core.pycompat import is_duck_dask_array
from ..core.utils import FrozenDict, NdimSizeLenMixin, is_remote_uri
from xarray.conventions import cf_encoder
from xarray.core import indexing
from xarray.core.pycompat import is_duck_dask_array
from xarray.core.utils import FrozenDict, NdimSizeLenMixin, is_remote_uri

if TYPE_CHECKING:
from io import BufferedIOBase
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/file_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@
import warnings
from typing import Any, Hashable

from ..core import utils
from ..core.options import OPTIONS
from .locks import acquire
from .lru_cache import LRUCache
from xarray.backends.locks import acquire
from xarray.backends.lru_cache import LRUCache
from xarray.core import utils
from xarray.core.options import OPTIONS

# Global cache for storing open files.
FILE_CACHE: LRUCache[Any, io.IOBase] = LRUCache(
Expand Down
Loading

0 comments on commit 284cff3

Please sign in to comment.