Skip to content

Commit

Permalink
Fixed flake8 gitlab error (landlab#1512)
Browse files Browse the repository at this point in the history
* update the pre-commit flake8 hook

* remove comprehension lint

* add flake8-comprehension to hook

* add news fragments

* remove some additional lint from the tests folder
  • Loading branch information
mcflugen authored Nov 18, 2022
1 parent 0b2b594 commit e05c8b4
Show file tree
Hide file tree
Showing 68 changed files with 254 additions and 248 deletions.
13 changes: 9 additions & 4 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,16 @@ repos:
require_serial: true
types_or: [python, pyi, jupyter]
additional_dependencies: [".[jupyter]"]
- repo: https://gitlab.com/pycqa/flake8
rev: 3.9.2

- repo: https://github.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8
# additional_dependencies: [flake8-bugbear]
- id: flake8
additional_dependencies:
- flake8-comprehensions
# - flake8-bugbear
# - flake8-simplify

- repo: https://gitlab.com/iamlikeme/nbhooks
rev: 1.0.0
hooks:
Expand Down
2 changes: 1 addition & 1 deletion landlab/bmi/standard_names.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,4 +58,4 @@
}


LANDLAB_NAME = dict((value, key) for key, value in STANDARD_NAME.items() if key)
LANDLAB_NAME = {value: key for key, value in STANDARD_NAME.items() if key}
4 changes: 2 additions & 2 deletions landlab/cmd/landlab.py
Original file line number Diff line number Diff line change
Expand Up @@ -151,7 +151,7 @@ def grids(ctx):
verbose = ctx.parent.parent.params["verbose"]
silent = ctx.parent.parent.params["silent"]

index = dict(grids={})
index = {"grids": {}}
for cls in GRIDS:
index["grids"][cls.__name__] = _categorize_class(cls)
index["grids"][cls.__name__]["field-io"] += [
Expand Down Expand Up @@ -199,7 +199,7 @@ def components(ctx):

from sphinx.util.docstrings import prepare_docstring

index = dict(components={})
index = {"components": {}}
for cls in get_all_components():
if verbose and not silent:
out(f"indexing: {cls.__name__}")
Expand Down
2 changes: 1 addition & 1 deletion landlab/components/flexure/funcs.py
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,7 @@ def subside_point_load(load, loc, coords, params=None, out=None):
>>> print(round(dz.max(), 9) / 2.)
5.265e-07
"""
params = params or dict(eet=6500.0, youngs=7.0e10)
params = params or {"eet": 6500.0, "youngs": 7.0e10}
eet, youngs = params["eet"], params["youngs"]
gamma_mantle = params.get("gamma_mantle", 33000.0)

Expand Down
10 changes: 5 additions & 5 deletions landlab/components/flow_accum/flow_accum_to_n.py
Original file line number Diff line number Diff line change
Expand Up @@ -75,7 +75,7 @@ def __init__(self, delta, D, num_receivers):
"""

self.num_receivers = num_receivers
self.s = list()
self.s = []
self.delta = delta
self.D = D

Expand Down Expand Up @@ -135,7 +135,7 @@ def construct__stack(self, nodes):
try:
base = set(nodes)
except TypeError:
base = set([nodes])
base = {nodes}

# instantiate the time keeping variable i, and a variable to keep track
of the visit time. Using visit time allows us to iterate through
Expand All @@ -151,7 +151,7 @@ def construct__stack(self, nodes):
num_visits[list(base)] += 1

i = 1
visited = set([])
visited = set()
for node_i in base:
# select the nodes to visit
visit = set(self.D[self.delta[node_i] : self.delta[node_i + 1]])
Expand All @@ -177,8 +177,8 @@ def construct__stack(self, nodes):
# increase counter
i += 1

visited = set([])
new_completes = set([])
visited = set()
new_completes = set()

for node_i in completed:

Expand Down
4 changes: 2 additions & 2 deletions landlab/components/lake_fill/lake_fill_barnes.py
Original file line number Diff line number Diff line change
Expand Up @@ -684,7 +684,7 @@ def _fill_to_flat_with_tracking(
>>> out == {8: deque([7]), 16: deque([15, 9, 14, 22])}
True
"""
lakemappings = dict()
lakemappings = {}
outlet_ID = self._grid.BAD_INDEX
while True:
try:
Expand Down Expand Up @@ -887,7 +887,7 @@ def _fill_to_slant_with_optional_tracking(
... 'ignore_overfill flag at component instantiation.')
ValueError was raised: Pit is overfilled due to creation of two outlets as the minimum gradient gets applied. Suppress this Error with the ignore_overfill flag at component instantiation.
"""
lakemappings = dict()
lakemappings = {}
outlet_ID = self._grid.BAD_INDEX
while True:
try:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from landlab import Component

_VALID_METHODS = set(["Constant", "PriestleyTaylor", "MeasuredRadiationPT", "Cosine"])
_VALID_METHODS = {"Constant", "PriestleyTaylor", "MeasuredRadiationPT", "Cosine"}


def _assert_method_is_valid(method):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from landlab import Component

_VALID_METHODS = set(["Grid"])
_VALID_METHODS = {"Grid"}
GRASS = 0
SHRUB = 1
TREE = 2
Expand Down
2 changes: 1 addition & 1 deletion landlab/components/radiation/radiation.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from landlab import Component

_VALID_METHODS = set(["Grid"])
_VALID_METHODS = {"Grid"}


def _assert_method_is_valid(method):
Expand Down
2 changes: 1 addition & 1 deletion landlab/components/soil_moisture/soil_moisture_dynamics.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from landlab import Component

_VALID_METHODS = set(["Grid", "Multi"])
_VALID_METHODS = {"Grid", "Multi"}


def assert_method_is_valid(method):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from landlab import Component

_VALID_METHODS = set(["Grid"])
_VALID_METHODS = {"Grid"}


def assert_method_is_valid(method):
Expand Down
2 changes: 1 addition & 1 deletion landlab/graph/graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ def thaw(self):
self._frozen = False

def _add_variable(self, name, var, dims=None, attrs=None):
kwds = dict(data=var, dims=dims, attrs=attrs)
kwds = {"data": var, "dims": dims, "attrs": attrs}
self.ds.update({name: xr.DataArray(**kwds)})
if self._frozen:
self.freeze()
Expand Down
2 changes: 1 addition & 1 deletion landlab/grid/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -454,7 +454,7 @@ def fields(self, include="*", exclude=None):
if isinstance(exclude, str):
exclude = [exclude]

layer_groups = set(["_".join(["layer", at]) for at in self.groups])
layer_groups = {"_".join(["layer", at]) for at in self.groups}
layer_groups.add("layer")

canonical_names = set()
Expand Down
7 changes: 4 additions & 3 deletions landlab/grid/create.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,9 +114,10 @@ def add_field_from_function(grid, name, functions, at="node"):
ModelGrid
The grid with the new field.
"""
valid_functions = set(_SYNTHETIC_FIELD_CONSTRUCTORS) | set(
["read_esri_ascii", "read_netcdf"]
)
valid_functions = set(_SYNTHETIC_FIELD_CONSTRUCTORS) | {
"read_esri_ascii",
"read_netcdf",
}

for func_name, func_args in as_list_of_tuples(functions):
if func_name not in valid_functions:
Expand Down
4 changes: 2 additions & 2 deletions landlab/io/esri_ascii.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,7 +231,7 @@ def _header_is_valid(header):
The header has the key but its values is of the wrong type.
"""
header_keys = set(header)
required_keys = set(["ncols", "nrows", "cellsize"])
required_keys = {"ncols", "nrows", "cellsize"}

if not required_keys.issubset(header_keys):
raise MissingRequiredKeyError(", ".join(required_keys - header_keys))
Expand Down Expand Up @@ -336,7 +336,7 @@ def read_asc_header(asc_file):
Traceback (most recent call last):
KeyTypeError: Unable to convert nrows to <type 'int'>
"""
header = dict()
header = {}
for (key, value) in _header_lines(asc_file):
header[key] = value

Expand Down
12 changes: 5 additions & 7 deletions landlab/io/netcdf/dump.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,13 +131,11 @@ def to_netcdf(

def _add_time_dimension_to_dataset(dataset, time=0.0):
"""Add a time dimension to all variables except those at_layer."""
names = set(
[
name
for name in dataset.variables
if name.startswith("at_") and not name.startswith("at_layer")
]
)
names = {
name
for name in dataset.variables
if name.startswith("at_") and not name.startswith("at_layer")
}

for name in names:
dataset[name] = (("time",) + dataset[name].dims, dataset[name].values[None])
Expand Down
2 changes: 1 addition & 1 deletion landlab/io/netcdf/read.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ def _read_netcdf_structured_data(root):
Data values, reshaped to match that of the grid. Keys are the
variable names as read from the NetCDF file.
"""
fields = dict()
fields = {}
grid_mapping_exists = False
grid_mapping_dict = None
for (name, var) in root.variables.items():
Expand Down
11 changes: 7 additions & 4 deletions landlab/io/netcdf/write.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def _get_dimension_sizes(shape):
"""
names = _AXIS_DIMENSION_NAMES[-1 : -(len(shape) + 1) : -1]

sizes = dict()
sizes = {}
for (axis, name) in enumerate(names):
sizes[name] = shape[-(axis + 1)]

Expand Down Expand Up @@ -530,9 +530,12 @@ def _set_netcdf_grid_mapping_variable(root, grid_mapping):
setattr(var, attr, grid_mapping[attr])


_VALID_NETCDF_FORMATS = set(
["NETCDF3_CLASSIC", "NETCDF3_64BIT", "NETCDF4_CLASSIC", "NETCDF4"]
)
_VALID_NETCDF_FORMATS = {
"NETCDF3_CLASSIC",
"NETCDF3_64BIT",
"NETCDF4_CLASSIC",
"NETCDF4",
}


def _guess_at_location(fields, names):
Expand Down
8 changes: 4 additions & 4 deletions landlab/io/shapefile/read_shapefile.py
Original file line number Diff line number Diff line change
Expand Up @@ -258,10 +258,10 @@ def read_shapefile(

sf = _read_shapefile(file, dbf)

link_field_conversion = link_field_conversion or dict()
node_field_conversion = node_field_conversion or dict()
link_field_dtype = link_field_dtype or dict()
node_field_dtype = node_field_dtype or dict()
link_field_conversion = link_field_conversion or {}
node_field_conversion = node_field_conversion or {}
link_field_dtype = link_field_dtype or {}
node_field_dtype = node_field_dtype or {}

if sf.shapeTypeName != "POLYLINE":
raise ValueError(
Expand Down
2 changes: 1 addition & 1 deletion landlab/layers/eventlayers.py
Original file line number Diff line number Diff line change
Expand Up @@ -528,7 +528,7 @@ def __init__(self, number_of_stacks, allocated=0):
self._number_of_layers = 0
self._number_of_stacks = number_of_stacks
self._surface_index = np.zeros(number_of_stacks, dtype=int)
self._attrs = dict()
self._attrs = {}

dims = (self.number_of_layers, self.number_of_stacks)
self._attrs["_dz"] = np.empty(dims, dtype=float)
Expand Down
2 changes: 1 addition & 1 deletion landlab/plot/imshow.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,7 +304,7 @@ def _imshow_grid_values(
+ grid.xy_of_lower_left[0]
)

kwds = dict(cmap=cmap)
kwds = {"cmap": cmap}
(kwds["vmin"], kwds["vmax"]) = (values.min(), values.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
Expand Down
16 changes: 10 additions & 6 deletions landlab/plot/imshowhs.py
Original file line number Diff line number Diff line change
Expand Up @@ -445,9 +445,13 @@ def _imshowhs_grid_values(

# Properties of bounding box of colorbar label, if used:
if add_label_bbox:
bbox_prop = dict(
boxstyle="round", pad=0.1, facecolor="white", alpha=0.7, edgecolor="white"
)
bbox_prop = {
"boxstyle": "round",
"pad": 0.1,
"facecolor": "white",
"alpha": 0.7,
"edgecolor": "white",
}
else:
bbox_prop = None

Expand Down Expand Up @@ -501,7 +505,7 @@ def _imshowhs_grid_values(
blend_modes = ["hsv", "overlay", "soft"]
if plot_type == "DEM":

kwds = dict(cmap=cmap)
kwds = {"cmap": cmap}
(kwds["vmin"], kwds["vmax"]) = (values.min(), values.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
Expand Down Expand Up @@ -566,7 +570,7 @@ def _imshowhs_grid_values(
shape = (-1,)
val1 = values_at_node_drape1.reshape(shape)

kwds = dict(cmap=cmap)
kwds = {"cmap": cmap}
(kwds["vmin"], kwds["vmax"]) = (val1.min(), val1.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
Expand Down Expand Up @@ -699,7 +703,7 @@ def _imshowhs_grid_values(

if cmap2 is None:
cmap2 = plt.cm.terrain
kwds = dict(cmap=cmap2)
kwds = {"cmap": cmap2}
(kwds["vmin"], kwds["vmax"]) = (val2.min(), val2.max())
if (limits is None) and ((vmin is None) and (vmax is None)):
if symmetric_cbar:
Expand Down
2 changes: 1 addition & 1 deletion landlab/utils/decorators.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def _wrapped(grid):
ds = grid

if name not in ds:
setattr(grid, self._dataset, ds.update(dict(name=func(grid))))
setattr(grid, self._dataset, ds.update({"name": func(grid)}))

return getattr(grid, self._dataset)[name].values

Expand Down
3 changes: 3 additions & 0 deletions news/1512.misc
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@

Fixed a broken *pre-commit* hook that caused an error when checking for lint
with *flake8*.
3 changes: 3 additions & 0 deletions news/1512.misc.1
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@

Added *flake8-comprehension* to the *flake8* *pre-commit* hook to identify
comprehension-related lint.
2 changes: 1 addition & 1 deletion notebooks/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
def collect_notebooks(src):
p = pathlib.Path(src)
if p.is_dir():
return set([_p.absolute() for _p in iter_notebooks_in_dir(p, src)])
return {_p.absolute() for _p in iter_notebooks_in_dir(p, src)}
else:
raise ValueError("{0}: not a directory".format(src))

Expand Down
4 changes: 2 additions & 2 deletions notebooks/run_notebook_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@
def collect_notebooks(src):
p = pathlib.Path(src)
if p.is_dir():
return set([_p.absolute() for _p in iter_notebooks_in_dir(p, src)])
return {_p.absolute() for _p in iter_notebooks_in_dir(p, src)}
elif is_a_notebook(p):
return set([p.absolute()])
return {p.absolute()}
else:
raise ValueError("{0}: not a directory or a notebook".format(src))

Expand Down
Loading

0 comments on commit e05c8b4

Please sign in to comment.