absolufy-imports - No relative imports - PEP8 #7204

Merged
12 commits merged on Dec 7, 2022
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -8,6 +8,11 @@ repos:
- id: check-yaml
- id: debug-statements
- id: mixed-line-ending
- repo: https://github.com/MarcoGorelli/absolufy-imports
rev: v0.3.1
hooks:
- id: absolufy-imports
name: absolufy-imports
# This wants to go before isort & flake8
- repo: https://github.com/PyCQA/autoflake
rev: "v2.0.0"
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/dataarray_missing.py
@@ -1,8 +1,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask
Contributor

This makes our benchmarks fail unfortunately:

    STDERR -------->
Error:    Traceback (most recent call last):
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 1435, in <module>
       main()
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 1428, in main
       commands[mode](args)
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 1103, in main_discover
       list_benchmarks(benchmark_dir, fp)
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 1088, in list_benchmarks
       for benchmark in disc_benchmarks(root):
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 985, in disc_benchmarks
       for module in disc_modules(root_name, ignore_import_errors=ignore_import_errors):
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 967, in disc_modules
       for item in disc_modules(name, ignore_import_errors=ignore_import_errors):
     File "/home/runner/micromamba-root/envs/xarray-tests/lib/python3.10/site-packages/asv/benchmark.py", line 950, in disc_modules
       module = import_module(module_name)
     File "/home/runner/work/xarray/xarray/asv_bench/.asv/env/3e1d7de4e47af51e3e41695ae1884ae2/lib/python3.8/importlib/__init__.py", line 127, in import_module
       return _bootstrap._gcd_import(name[level:], package, level)
     File "<frozen importlib._bootstrap>", line 1014, in _gcd_import
     File "<frozen importlib._bootstrap>", line 991, in _find_and_load
     File "<frozen importlib._bootstrap>", line 975, in _find_and_load_unlocked
     File "<frozen importlib._bootstrap>", line 671, in _load_unlocked
     File "<frozen importlib._bootstrap_external>", line 843, in exec_module
     File "<frozen importlib._bootstrap>", line 219, in _call_with_frames_removed
     File "/home/runner/work/xarray/xarray/asv_bench/benchmarks/dataarray_missing.py", line 4, in <module>
       from asv_bench.benchmarks import parameterized, randn, requires_dask
   ModuleNotFoundError: No module named 'asv_bench'


Hey, thanks for using absolufy-imports - just for reference, in pandas we don't use it on asv_bench

https://github.com/pandas-dev/pandas/blob/4eb4729fb47bd160a39ec49f6af89dabfc63d3ac/.pre-commit-config.yaml#L8-L12

If you add a line to the hook with - files: ^xarray/, and revert the changes to asv_bench, then I think it should work fine
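
A minimal sketch of how the hook entry from this PR could be scoped that way (same repo and rev as in the .pre-commit-config.yaml hunk above; the `files` pattern is the commenter's suggestion, not something taken from the merged config):

    - repo: https://github.com/MarcoGorelli/absolufy-imports
      rev: v0.3.1
      hooks:
        - id: absolufy-imports
          name: absolufy-imports
          # only rewrite imports inside the xarray package,
          # leaving asv_bench/ on relative imports
          files: ^xarray/

With that filter, pre-commit would skip the benchmark files entirely, matching how pandas scopes the same hook.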



def make_bench_data(shape, frac_nan, chunks):
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/dataset_io.py
@@ -4,8 +4,7 @@
import pandas as pd

import xarray as xr

from . import _skip_slow, randint, randn, requires_dask
from asv_bench.benchmarks import _skip_slow, randint, randn, requires_dask

try:
import dask
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/groupby.py
@@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import _skip_slow, parameterized, requires_dask
from asv_bench.benchmarks import _skip_slow, parameterized, requires_dask


class GroupBy:
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/indexing.py
@@ -4,8 +4,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randint, randn, requires_dask
from asv_bench.benchmarks import parameterized, randint, randn, requires_dask

nx = 2000
ny = 1000
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/interp.py
@@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

nx = 1500
ny = 1000
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/pandas.py
@@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized
from asv_bench.benchmarks import parameterized


class MultiIndexSeries:
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/polyfit.py
@@ -1,8 +1,7 @@
import numpy as np

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

NDEGS = (2, 5, 20)
NX = (10**2, 10**6)
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/reindexing.py
@@ -1,8 +1,7 @@
import numpy as np

import xarray as xr

from . import requires_dask
from asv_bench.benchmarks import requires_dask

ntime = 500
nx = 50
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/rolling.py
@@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import parameterized, randn, requires_dask
from asv_bench.benchmarks import parameterized, randn, requires_dask

nx = 300
long_nx = 30000
3 changes: 1 addition & 2 deletions asv_bench/benchmarks/unstacking.py
@@ -2,8 +2,7 @@
import pandas as pd

import xarray as xr

from . import requires_dask, requires_sparse
from asv_bench.benchmarks import requires_dask, requires_sparse


class Unstacking:
4 changes: 3 additions & 1 deletion doc/whats-new.rst
@@ -42,7 +42,9 @@ Documentation

Internal Changes
~~~~~~~~~~~~~~~~

- Add the pre-commit hook `absolufy-imports` to convert relative xarray imports to
absolute imports (:pull:`7204`).
By `Jimmy Westling <https://github.com/illviljan>`_.

.. _whats-new.2022.12.0:

45 changes: 24 additions & 21 deletions xarray/__init__.py
@@ -1,22 +1,22 @@
from . import testing, tutorial
from .backends.api import (
from xarray import testing, tutorial
from xarray.backends.api import (
load_dataarray,
load_dataset,
open_dataarray,
open_dataset,
open_mfdataset,
save_mfdataset,
)
from .backends.rasterio_ import open_rasterio
from .backends.zarr import open_zarr
from .coding.cftime_offsets import cftime_range, date_range, date_range_like
from .coding.cftimeindex import CFTimeIndex
from .coding.frequencies import infer_freq
from .conventions import SerializationWarning, decode_cf
from .core.alignment import align, broadcast
from .core.combine import combine_by_coords, combine_nested
from .core.common import ALL_DIMS, full_like, ones_like, zeros_like
from .core.computation import (
from xarray.backends.rasterio_ import open_rasterio
from xarray.backends.zarr import open_zarr
from xarray.coding.cftime_offsets import cftime_range, date_range, date_range_like
from xarray.coding.cftimeindex import CFTimeIndex
from xarray.coding.frequencies import infer_freq
from xarray.conventions import SerializationWarning, decode_cf
from xarray.core.alignment import align, broadcast
from xarray.core.combine import combine_by_coords, combine_nested
from xarray.core.common import ALL_DIMS, full_like, ones_like, zeros_like
from xarray.core.computation import (
apply_ufunc,
corr,
cov,
@@ -26,15 +26,18 @@
unify_chunks,
where,
)
from .core.concat import concat
from .core.dataarray import DataArray
from .core.dataset import Dataset
from .core.extensions import register_dataarray_accessor, register_dataset_accessor
from .core.merge import Context, MergeError, merge
from .core.options import get_options, set_options
from .core.parallel import map_blocks
from .core.variable import Coordinate, IndexVariable, Variable, as_variable
from .util.print_versions import show_versions
from xarray.core.concat import concat
from xarray.core.dataarray import DataArray
from xarray.core.dataset import Dataset
from xarray.core.extensions import (
register_dataarray_accessor,
register_dataset_accessor,
)
from xarray.core.merge import Context, MergeError, merge
from xarray.core.options import get_options, set_options
from xarray.core.parallel import map_blocks
from xarray.core.variable import Coordinate, IndexVariable, Variable, as_variable
from xarray.util.print_versions import show_versions

try:
from importlib.metadata import version as _version
33 changes: 20 additions & 13 deletions xarray/backends/__init__.py
@@ -3,19 +3,26 @@
DataStores provide a uniform interface for saving and loading data in different
formats. They should not be used directly, but rather through Dataset objects.
"""
from .cfgrib_ import CfGribDataStore
from .common import AbstractDataStore, BackendArray, BackendEntrypoint
from .file_manager import CachingFileManager, DummyFileManager, FileManager
from .h5netcdf_ import H5netcdfBackendEntrypoint, H5NetCDFStore
from .memory import InMemoryDataStore
from .netCDF4_ import NetCDF4BackendEntrypoint, NetCDF4DataStore
from .plugins import list_engines
from .pseudonetcdf_ import PseudoNetCDFBackendEntrypoint, PseudoNetCDFDataStore
from .pydap_ import PydapBackendEntrypoint, PydapDataStore
from .pynio_ import NioDataStore
from .scipy_ import ScipyBackendEntrypoint, ScipyDataStore
from .store import StoreBackendEntrypoint
from .zarr import ZarrBackendEntrypoint, ZarrStore
from xarray.backends.cfgrib_ import CfGribDataStore
from xarray.backends.common import AbstractDataStore, BackendArray, BackendEntrypoint
from xarray.backends.file_manager import (
CachingFileManager,
DummyFileManager,
FileManager,
)
from xarray.backends.h5netcdf_ import H5netcdfBackendEntrypoint, H5NetCDFStore
from xarray.backends.memory import InMemoryDataStore
from xarray.backends.netCDF4_ import NetCDF4BackendEntrypoint, NetCDF4DataStore
from xarray.backends.plugins import list_engines
from xarray.backends.pseudonetcdf_ import (
PseudoNetCDFBackendEntrypoint,
PseudoNetCDFDataStore,
)
from xarray.backends.pydap_ import PydapBackendEntrypoint, PydapDataStore
from xarray.backends.pynio_ import NioDataStore
from xarray.backends.scipy_ import ScipyBackendEntrypoint, ScipyDataStore
from xarray.backends.store import StoreBackendEntrypoint
from xarray.backends.zarr import ZarrBackendEntrypoint, ZarrStore

__all__ = [
"AbstractDataStore",
24 changes: 12 additions & 12 deletions xarray/backends/api.py
@@ -25,20 +25,20 @@

import numpy as np

from .. import backends, conventions
from ..core import indexing
from ..core.combine import (
from xarray import backends, conventions
from xarray.backends import plugins
from xarray.backends.common import AbstractDataStore, ArrayWriter, _normalize_path
from xarray.backends.locks import _get_scheduler
from xarray.core import indexing
from xarray.core.combine import (
_infer_concat_order_from_positions,
_nested_combine,
combine_by_coords,
)
from ..core.dataarray import DataArray
from ..core.dataset import Dataset, _get_chunk, _maybe_chunk
from ..core.indexes import Index
from ..core.utils import is_remote_uri
from . import plugins
from .common import AbstractDataStore, ArrayWriter, _normalize_path
from .locks import _get_scheduler
from xarray.core.dataarray import DataArray
from xarray.core.dataset import Dataset, _get_chunk, _maybe_chunk
from xarray.core.indexes import Index
from xarray.core.utils import is_remote_uri

if TYPE_CHECKING:
try:
@@ -47,13 +47,13 @@
Delayed = None # type: ignore
from io import BufferedIOBase

from ..core.types import (
from xarray.backends.common import BackendEntrypoint
from xarray.core.types import (
CombineAttrsOptions,
CompatOptions,
JoinOptions,
NestedSequence,
)
from .common import BackendEntrypoint

T_NetcdfEngine = Literal["netcdf4", "scipy", "h5netcdf"]
T_Engine = Union[
12 changes: 6 additions & 6 deletions xarray/backends/cfgrib_.py
@@ -5,18 +5,18 @@

import numpy as np

from ..core import indexing
from ..core.utils import Frozen, FrozenDict, close_on_error, module_available
from ..core.variable import Variable
from .common import (
from xarray.backends.common import (
BACKEND_ENTRYPOINTS,
AbstractDataStore,
BackendArray,
BackendEntrypoint,
_normalize_path,
)
from .locks import SerializableLock, ensure_lock
from .store import StoreBackendEntrypoint
from xarray.backends.locks import SerializableLock, ensure_lock
from xarray.backends.store import StoreBackendEntrypoint
from xarray.core import indexing
from xarray.core.utils import Frozen, FrozenDict, close_on_error, module_available
from xarray.core.variable import Variable

# FIXME: Add a dedicated lock, even if ecCodes is supposed to be thread-safe
# in most circumstances. See:
8 changes: 4 additions & 4 deletions xarray/backends/common.py
@@ -8,10 +8,10 @@

import numpy as np

from ..conventions import cf_encoder
from ..core import indexing
from ..core.pycompat import is_duck_dask_array
from ..core.utils import FrozenDict, NdimSizeLenMixin, is_remote_uri
from xarray.conventions import cf_encoder
from xarray.core import indexing
from xarray.core.pycompat import is_duck_dask_array
from xarray.core.utils import FrozenDict, NdimSizeLenMixin, is_remote_uri

if TYPE_CHECKING:
from io import BufferedIOBase
8 changes: 4 additions & 4 deletions xarray/backends/file_manager.py
@@ -7,10 +7,10 @@
import warnings
from typing import Any, Hashable

from ..core import utils
from ..core.options import OPTIONS
from .locks import acquire
from .lru_cache import LRUCache
from xarray.backends.locks import acquire
from xarray.backends.lru_cache import LRUCache
from xarray.core import utils
from xarray.core.options import OPTIONS

# Global cache for storing open files.
FILE_CACHE: LRUCache[Any, io.IOBase] = LRUCache(
28 changes: 14 additions & 14 deletions xarray/backends/h5netcdf_.py
@@ -6,32 +6,32 @@

from packaging.version import Version

from ..core import indexing
from ..core.utils import (
FrozenDict,
is_remote_uri,
module_available,
read_magic_number_from_file,
try_read_magic_number_from_file_or_path,
)
from ..core.variable import Variable
from .common import (
from xarray.backends.common import (
BACKEND_ENTRYPOINTS,
BackendEntrypoint,
WritableCFDataStore,
_normalize_path,
find_root_and_group,
)
from .file_manager import CachingFileManager, DummyFileManager
from .locks import HDF5_LOCK, combine_locks, ensure_lock, get_write_lock
from .netCDF4_ import (
from xarray.backends.file_manager import CachingFileManager, DummyFileManager
from xarray.backends.locks import HDF5_LOCK, combine_locks, ensure_lock, get_write_lock
from xarray.backends.netCDF4_ import (
BaseNetCDF4Array,
_encode_nc4_variable,
_extract_nc4_variable_encoding,
_get_datatype,
_nc4_require_group,
)
from .store import StoreBackendEntrypoint
from xarray.backends.store import StoreBackendEntrypoint
from xarray.core import indexing
from xarray.core.utils import (
FrozenDict,
is_remote_uri,
module_available,
read_magic_number_from_file,
try_read_magic_number_from_file_or_path,
)
from xarray.core.variable import Variable


class H5NetCDFArrayWrapper(BaseNetCDF4Array):
4 changes: 2 additions & 2 deletions xarray/backends/memory.py
@@ -4,8 +4,8 @@

import numpy as np

from ..core.variable import Variable
from .common import AbstractWritableDataStore
from xarray.backends.common import AbstractWritableDataStore
from xarray.core.variable import Variable


class InMemoryDataStore(AbstractWritableDataStore):