Skip to content

Commit

Permalink
Use flake8 rather than pycodestyle (#3010)
Browse files Browse the repository at this point in the history
* A few flake fixes

* isort

* bunch of flake8 errors fixed

* flake8 config

* pep8speaks config

* run flake8 in travis

* docs to flake8

* pep8speaks configs inherited from setup.cfg

* too much isort, skipped base __init__

* imports

* install flake8 in travis

* update 3.6 reqs
  • Loading branch information
max-sixty authored and shoyer committed Jun 12, 2019
1 parent fda6056 commit 3429ca2
Show file tree
Hide file tree
Showing 47 changed files with 116 additions and 134 deletions.
18 changes: 4 additions & 14 deletions .pep8speaks.yml
Original file line number Diff line number Diff line change
@@ -1,16 +1,6 @@
# File : .pep8speaks.yml

# This should be kept in sync with the duplicate config in the [pycodestyle]
# block of setup.cfg.
# https://github.com/OrkoHunter/pep8speaks for more info
# pep8speaks will use the flake8 configs in `setup.cfg`

scanner:
diff_only: False # If True, errors caused by only the patch are shown

pycodestyle:
max-line-length: 79
ignore: # Errors and warnings to ignore
- E402 # module level import not at top of file
- E731 # do not assign a lambda expression, use a def
- E741 # ambiguous variable name
- W503 # line break before binary operator
- W504 # line break after binary operator
diff_only: False
linter: flake8
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@ script:
cd doc;
sphinx-build -n -j auto -b html -d _build/doctrees . _build/html;
elif [[ "$CONDA_ENV" == "lint" ]]; then
pycodestyle xarray ;
flake8 ;
elif [[ "$CONDA_ENV" == "py36-hypothesis" ]]; then
pytest properties ;
else
Expand Down
5 changes: 2 additions & 3 deletions asv_bench/benchmarks/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import, division, print_function

import itertools

import numpy as np
Expand Down
2 changes: 1 addition & 1 deletion ci/requirements-py36.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ dependencies:
- pytest-cov
- pytest-env
- coveralls
- pycodestyle
- flake8
- numpy>=1.12
- pandas>=0.19
- scipy
Expand Down
2 changes: 1 addition & 1 deletion ci/requirements-py37.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ dependencies:
- pytest-cov
- pytest-env
- coveralls
- pycodestyle
- flake8
- numpy>=1.12
- pandas>=0.19
- scipy
Expand Down
2 changes: 1 addition & 1 deletion doc/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@
# serve to show the default.
from __future__ import absolute_import, division, print_function

from contextlib import suppress
import datetime
import os
import subprocess
import sys
from contextlib import suppress

import xarray

Expand Down
8 changes: 3 additions & 5 deletions doc/contributing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -351,20 +351,18 @@ the more common ``PEP8`` issues:
- passing arguments should have spaces after commas, e.g. ``foo(arg1, arg2, kw1='bar')``

:ref:`Continuous Integration <contributing.ci>` will run
the `pycodestyle <http://pypi.python.org/pypi/pycodestyle>`_ tool
the `flake8 <http://flake8.pycqa.org/en/latest/>`_ tool
and report any stylistic errors in your code. Therefore, it is helpful before
submitting code to run the check yourself::
submitting code to run the check yourself:

pycodestyle xarray
flake8

Other recommended but optional tools for checking code quality (not currently
enforced in CI):

- `mypy <http://mypy-lang.org/>`_ performs static type checking, which can
make it easier to catch bugs. Please run ``mypy xarray`` if you annotate any
code with `type hints <https://docs.python.org/3/library/typing.html>`_.
- `flake8 <http://pypi.python.org/pypi/flake8>`_ includes a few more automated
checks than those enforced by pycodestyle.
- `isort <https://github.com/timothycrosley/isort>`_ will highlight
  incorrectly sorted imports. ``isort -y`` will automatically fix them. See
  also `flake8-isort <https://github.com/gforcada/flake8-isort>`_.
Expand Down
2 changes: 1 addition & 1 deletion doc/examples/_code/weather_data_setup.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import numpy as np
import pandas as pd
import seaborn as sns # pandas aware plotting library
import seaborn as sns # noqa, pandas aware plotting library

import xarray as xr

Expand Down
13 changes: 11 additions & 2 deletions setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,18 @@ env =
UVCDAT_ANONYMOUS_LOG=no

# This should be kept in sync with .pep8speaks.yml
[pycodestyle]
[flake8]
max-line-length=79
ignore=E402,E731,E741,W503,W504
ignore=
E402
E731
E741
W503
W504
# Unused imports; TODO: Allow typing to work without triggering errors
F401
exclude=
doc

[isort]
default_section=THIRDPARTY
Expand Down
1 change: 1 addition & 0 deletions xarray/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
""" isort:skip_file """
# flake8: noqa

from ._version import get_versions
Expand Down
8 changes: 4 additions & 4 deletions xarray/backends/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,16 @@
DataStores provide a uniform interface for saving and loading data in different
formats. They should not be used directly, but rather through Dataset objects.
"""
from .common import AbstractDataStore
from .file_manager import FileManager, CachingFileManager, DummyFileManager
from .cfgrib_ import CfGribDataStore
from .common import AbstractDataStore
from .file_manager import CachingFileManager, DummyFileManager, FileManager
from .h5netcdf_ import H5NetCDFStore
from .memory import InMemoryDataStore
from .netCDF4_ import NetCDF4DataStore
from .pseudonetcdf_ import PseudoNetCDFDataStore
from .pydap_ import PydapDataStore
from .pynio_ import NioDataStore
from .scipy_ import ScipyDataStore
from .h5netcdf_ import H5NetCDFStore
from .pseudonetcdf_ import PseudoNetCDFDataStore
from .zarr import ZarrStore

__all__ = [
Expand Down
2 changes: 1 addition & 1 deletion xarray/backends/file_manager.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import contextlib
import threading
from typing import Any, Dict
import warnings
from typing import Any, Dict

from ..core import utils
from ..core.options import OPTIONS
Expand Down
2 changes: 1 addition & 1 deletion xarray/backends/locks.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import multiprocessing
import threading
from typing import Any, MutableMapping
import weakref
from typing import Any, MutableMapping

try:
from dask.utils import SerializableLock
Expand Down
3 changes: 2 additions & 1 deletion xarray/backends/netCDF4_.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,7 +174,7 @@ def _force_native_endianness(var):
# if endian exists, remove it from the encoding.
var.encoding.pop('endian', None)
# check to see if encoding has a value for endian its 'native'
if not var.encoding.get('endian', 'native') is 'native':
if not var.encoding.get('endian', 'native') == 'native':
raise NotImplementedError("Attempt to write non-native endian type, "
"this is not supported by the netCDF4 "
"python library.")
Expand Down Expand Up @@ -237,6 +237,7 @@ def _extract_nc4_variable_encoding(variable, raise_on_invalid=False,

class GroupWrapper:
"""Wrap netCDF4.Group objects so closing them closes the root group."""

def __init__(self, value):
self.value = value

Expand Down
2 changes: 1 addition & 1 deletion xarray/coding/variables.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
"""Coders for individual Variable objects."""
from typing import Any
import warnings
from functools import partial
from typing import Any

import numpy as np
import pandas as pd
Expand Down
1 change: 0 additions & 1 deletion xarray/core/accessor_str.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,6 @@

from .computation import apply_ufunc


_cpython_optimized_encoders = (
"utf-8", "utf8", "latin-1", "latin1", "iso-8859-1", "mbcs", "ascii"
)
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/alignment.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
import numpy as np
import pandas as pd

from . import utils, dtypes
from . import dtypes, utils
from .indexing import get_indexer_nd
from .utils import is_dict_like, is_full_slice
from .variable import IndexVariable, Variable
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/combine.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,12 @@

import pandas as pd

from . import utils, dtypes
from . import dtypes, utils
from .alignment import align
from .computation import result_name
from .merge import merge
from .variable import IndexVariable, Variable, as_variable
from .variable import concat as concat_vars
from .computation import result_name


def concat(objs, dim=None, data_vars='all', coords='different',
Expand Down
6 changes: 3 additions & 3 deletions xarray/core/common.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
from collections import OrderedDict
from contextlib import suppress
from textwrap import dedent
from typing import (Any, Callable, Hashable, Iterable, Iterator, List, Mapping,
MutableMapping, Optional, Tuple, TypeVar, Union)
from typing import (
Any, Callable, Hashable, Iterable, Iterator, List, Mapping, MutableMapping,
Optional, Tuple, TypeVar, Union)

import numpy as np
import pandas as pd
Expand All @@ -13,7 +14,6 @@
from .pycompat import dask_array_type
from .utils import Frozen, ReprObject, SortedKeysDict, either_dict_or_kwargs


# Used as a sentinel value to indicate a all dimensions
ALL_DIMS = ReprObject('<all-dims>')

Expand Down
7 changes: 3 additions & 4 deletions xarray/core/dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
from distutils.version import LooseVersion
from numbers import Number
from typing import (
Any, Callable, Dict, List, Optional, Set, Tuple, TypeVar, Union, Sequence)
Any, Callable, Dict, List, Optional, Sequence, Set, Tuple, TypeVar, Union)

import numpy as np
import pandas as pd
Expand All @@ -35,8 +35,7 @@
from .pycompat import TYPE_CHECKING, dask_array_type
from .utils import (
Frozen, SortedKeysDict, _check_inplace, decode_numpy_dict_values,
either_dict_or_kwargs, ensure_us_time_resolution, hashable, is_dict_like,
maybe_wrap_array)
either_dict_or_kwargs, hashable, maybe_wrap_array)
from .variable import IndexVariable, Variable, as_variable, broadcast_variables

if TYPE_CHECKING:
Expand Down Expand Up @@ -4145,7 +4144,7 @@ def _integrate_one(self, coord, datetime_unit=None):
from .variable import Variable

if coord not in self.variables and coord not in self.dims:
raise ValueError('Coordinate {} does not exist.'.format(dim))
raise ValueError('Coordinate {} does not exist.'.format(coord))

coord_var = self[coord].variable
if coord_var.ndim != 1:
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/duck_array_ops.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@
accept or return xarray objects.
"""
import contextlib
from functools import partial
import inspect
import warnings
from functools import partial

import numpy as np
import pandas as pd
Expand Down
2 changes: 1 addition & 1 deletion xarray/core/missing.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
from . import utils
from .common import _contains_datetime_like_objects
from .computation import apply_ufunc
from .duck_array_ops import dask_array_type, datetime_to_numeric
from .duck_array_ops import dask_array_type
from .utils import OrderedSet, is_scalar
from .variable import Variable, broadcast_variables

Expand Down
2 changes: 1 addition & 1 deletion xarray/core/options.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def _warn_on_setting_enable_cftimeindex(enable_cftimeindex):
def _get_keep_attrs(default):
global_choice = OPTIONS['keep_attrs']

if global_choice is 'default':
if global_choice == 'default':
return default
elif global_choice in [True, False]:
return global_choice
Expand Down
10 changes: 6 additions & 4 deletions xarray/core/resample_cftime.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,14 +36,16 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.

from ..coding.cftimeindex import CFTimeIndex
from ..coding.cftime_offsets import (cftime_range, normalize_date,
Day, MonthEnd, QuarterEnd, YearEnd,
CFTIME_TICKS, to_offset)
import datetime

import numpy as np
import pandas as pd

from ..coding.cftime_offsets import (
CFTIME_TICKS, Day, MonthEnd, QuarterEnd, YearEnd, cftime_range,
normalize_date, to_offset)
from ..coding.cftimeindex import CFTimeIndex


class CFTimeGrouper:
"""This is a simple container for the grouping parameters that implements a
Expand Down
4 changes: 2 additions & 2 deletions xarray/core/rolling.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
from . import dtypes, duck_array_ops, utils
from .dask_array_ops import dask_rolling_wrapper
from .ops import (
bn, has_bottleneck, inject_coarsen_methods,
inject_bottleneck_rolling_methods, inject_datasetrolling_methods)
bn, has_bottleneck, inject_bottleneck_rolling_methods,
inject_coarsen_methods, inject_datasetrolling_methods)
from .pycompat import dask_array_type


Expand Down
8 changes: 4 additions & 4 deletions xarray/core/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,16 +7,16 @@
import re
import warnings
from collections import OrderedDict
from typing import (AbstractSet, Any, Callable, Container, Dict, Hashable,
Iterable, Iterator, Optional, Sequence,
Tuple, TypeVar, cast)
from typing import (
AbstractSet, Any, Callable, Container, Dict, Hashable, Iterable, Iterator,
Mapping, MutableMapping, MutableSet, Optional, Sequence, Tuple, TypeVar,
cast)

import numpy as np
import pandas as pd

from .pycompat import dask_array_type

from typing import Mapping, MutableMapping, MutableSet
try: # Fix typed collections in Python 3.5.0~3.5.2
from .pycompat import Mapping, MutableMapping, MutableSet # noqa: F811
except ImportError:
Expand Down
4 changes: 1 addition & 3 deletions xarray/plot/__init__.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
from .plot import (plot, line, step, contourf, contour,
hist, imshow, pcolormesh)

from .facetgrid import FacetGrid
from .plot import contour, contourf, hist, imshow, line, pcolormesh, plot, step

__all__ = [
'plot',
Expand Down
7 changes: 3 additions & 4 deletions xarray/plot/facetgrid.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
import functools
import itertools
import warnings
from inspect import getfullargspec

import numpy as np

from ..core.formatting import format_item
from .utils import (
_infer_xy_labels, _process_cmap_cbar_kwargs,
import_matplotlib_pyplot, label_from_attrs)
_infer_xy_labels, _process_cmap_cbar_kwargs, import_matplotlib_pyplot,
label_from_attrs)

# Overrides axes.labelsize, xtick.major.size, ytick.major.size
# from mpl.rcParams
Expand Down Expand Up @@ -483,7 +482,7 @@ def map(self, func, *args, **kwargs):
# TODO: better way to verify that an artist is mappable?
# https://stackoverflow.com/questions/33023036/is-it-possible-to-detect-if-a-matplotlib-artist-is-a-mappable-suitable-for-use-w#33023522
if (maybe_mappable and
hasattr(maybe_mappable, 'autoscale_None')):
hasattr(maybe_mappable, 'autoscale_None')):
self._mappables.append(maybe_mappable)

self._finalize_grid(*args[:2])
Expand Down
Loading

0 comments on commit 3429ca2

Please sign in to comment.