Commit ba03145

Merge pull request #218 from ome/pre-commit-ci-update-config

[pre-commit.ci] pre-commit autoupdate

sbesson authored Aug 27, 2022
2 parents 4f9cd24 + 4feb066, commit ba03145
Showing 6 changed files with 58 additions and 52 deletions.
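
Beyond the two hook version bumps in .pre-commit-config.yaml, the diff brings the Python sources in line with the checks enforced by the updated flake8, presumably via a logging-format plugin among its additional_dependencies: eager f-string interpolation in logging calls is replaced by lazy %-style arguments, which the logging module formats only if the record is actually emitted. A minimal sketch of the difference (logger name and class are illustrative, not from this repository):

import logging

LOGGER = logging.getLogger("demo")  # hypothetical logger name

class Expensive:
    """Stand-in for an object whose repr() is costly to compute."""

    def __repr__(self) -> str:
        return "Expensive(...)"  # imagine heavy work here

item = Expensive()

# Eager: the f-string calls repr(item) even when DEBUG is disabled.
LOGGER.debug(f"loaded {item}")

# Lazy: interpolation happens only if a handler actually emits the record.
LOGGER.debug("loaded %s", item)

The lazy form also keeps the unformatted message template intact, which log-aggregation tools can use to group related records.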
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -18,7 +18,7 @@ repos:
         args: [--target-version=py36]
 
   - repo: https://github.com/asottile/pyupgrade
-    rev: v2.37.2
+    rev: v2.37.3
     hooks:
       - id: pyupgrade
         args:
@@ -45,7 +45,7 @@ repos:
           - --autofix
 
   - repo: https://github.com/PyCQA/flake8
-    rev: 4.0.1
+    rev: 5.0.4
     hooks:
       - id: flake8
         additional_dependencies: [
28 changes: 15 additions & 13 deletions ome_zarr/format.py
@@ -128,12 +128,12 @@ def version(self) -> str:
 
     def matches(self, metadata: dict) -> bool:
         version = self._get_metadata_version(metadata)
-        LOGGER.debug(f"{self.version} matches {version}?")
+        LOGGER.debug("%s matches %s?", self.version, version)
         return version == self.version
 
     def init_store(self, path: str, mode: str = "r") -> FSStore:
         store = FSStore(path, mode=mode, dimension_separator=".")
-        LOGGER.debug(f"Created legacy flat FSStore({path}, {mode})")
+        LOGGER.debug("Created legacy flat FSStore(%s, %s)", path, mode)
         return store
 
     def generate_well_dict(
@@ -145,12 +145,14 @@ def validate_well_dict(
         self, well: dict, rows: List[str], columns: List[str]
     ) -> None:
         if any(e not in self.REQUIRED_PLATE_WELL_KEYS for e in well.keys()):
-            LOGGER.debug("f{well} contains unspecified keys")
+            LOGGER.debug("%s contains unspecified keys", well)
         for key, key_type in self.REQUIRED_PLATE_WELL_KEYS.items():
             if key not in well:
-                raise ValueError(f"{well} must contain a {key} key of type {key_type}")
+                raise ValueError(
+                    "%s must contain a %s key of type %s" % (well, key, key_type)
+                )
             if not isinstance(well[key], key_type):
-                raise ValueError(f"{well} path must be of {key_type} type")
+                raise ValueError("%s path must be of %s type" % (well, key_type))
 
     def generate_coordinate_transformations(
         self, shapes: List[tuple]
@@ -198,7 +200,7 @@ def init_store(self, path: str, mode: str = "r") -> FSStore:
             mode=mode,
             **kwargs,
         )  # TODO: open issue for using Path
-        LOGGER.debug(f"Created nested FSStore({path}, {mode}, {kwargs})")
+        LOGGER.debug("Created nested FSStore(%s, %s, %s)", path, mode, kwargs)
         return store
 
 
@@ -230,10 +232,10 @@ def generate_well_dict(
     ) -> dict:
         row, column = well.split("/")
         if row not in rows:
-            raise ValueError(f"{row} is not defined in the list of rows")
+            raise ValueError("%s is not defined in the list of rows" % row)
         rowIndex = rows.index(row)
         if column not in columns:
-            raise ValueError(f"{column} is not defined in the list of columns")
+            raise ValueError("%s is not defined in the list of columns" % column)
         columnIndex = columns.index(column)
         return {"path": str(well), "rowIndex": rowIndex, "columnIndex": columnIndex}
 
@@ -242,16 +244,16 @@ def validate_well_dict(
     ) -> None:
         super().validate_well_dict(well, rows, columns)
         if len(well["path"].split("/")) != 2:
-            raise ValueError(f"{well} path must exactly be composed of 2 groups")
+            raise ValueError("%s path must exactly be composed of 2 groups" % well)
         row, column = well["path"].split("/")
         if row not in rows:
-            raise ValueError(f"{row} is not defined in the plate rows")
+            raise ValueError("%s is not defined in the plate rows" % row)
         if well["rowIndex"] != rows.index(row):
-            raise ValueError(f"Mismatching row index for {well}")
+            raise ValueError("Mismatching row index for %s" % well)
         if column not in columns:
-            raise ValueError(f"{column} is not defined in the plate columns")
+            raise ValueError("%s is not defined in the plate columns" % column)
         if well["columnIndex"] != columns.index(column):
-            raise ValueError(f"Mismatching column index for {well}")
+            raise ValueError("Mismatching column index for %s" % well)
 
     def generate_coordinate_transformations(
         self, shapes: List[tuple]
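One caveat on the ValueError changes above: unlike the logging functions, an exception constructor performs no %-interpolation, so the messages have to be formatted eagerly (here with the % operator) before being passed in. A small illustration of the pitfall:

# Logging defers interpolation, but exception constructors do not:
# extra arguments are stored verbatim and never merged into the message.
err = ValueError("%s is not defined in the list of rows", "A")
print(err)   # ('%s is not defined in the list of rows', 'A')

# Exception messages therefore need eager formatting, e.g.:
row = "A"
err = ValueError("%s is not defined in the list of rows" % row)
print(err)   # A is not defined in the list of rows
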
20 changes: 11 additions & 9 deletions ome_zarr/io.py
@@ -31,7 +31,7 @@ def __init__(
         self, path: Union[Path, str], mode: str = "r", fmt: Format = CurrentFormat()
     ) -> None:
 
-        LOGGER.debug(f"ZarrLocation.__init__ path:{path}, fmt:{fmt.version}")
+        LOGGER.debug("ZarrLocation.__init__ path: %s, fmt: %s", path, fmt.version)
         self.__fmt = fmt
         self.__mode = mode
         if isinstance(path, Path):
@@ -48,9 +48,11 @@ def __init__(
 
         self.__init_metadata()
         detected = detect_format(self.__metadata, loader)
-        LOGGER.debug(f"ZarrLocation.__init__ {path} detected:{detected}")
+        LOGGER.debug("ZarrLocation.__init__ %s detected: %s", path, detected)
         if detected != fmt:
-            LOGGER.warning(f"version mismatch: detected:{detected}, requested:{fmt}")
+            LOGGER.warning(
+                "version mismatch: detected: %s, requested: %s", detected, fmt
+            )
             self.__fmt = detected
             self.__store = detected.init_store(self.__path, mode)
             self.__init_metadata()
@@ -134,7 +136,7 @@ def basename(self) -> str:
     def create(self, path: str) -> "ZarrLocation":
         """Create a new Zarr location for the given path."""
         subpath = self.subpath(path)
-        LOGGER.debug(f"open({self.__class__.__name__}({subpath}))")
+        LOGGER.debug("open(%s(%s))", self.__class__.__name__, subpath)
         return self.__class__(subpath, mode=self.__mode, fmt=self.__fmt)
 
     def get_json(self, subpath: str) -> JSONDict:
@@ -151,10 +153,10 @@ def get_json(self, subpath: str) -> JSONDict:
                 return {}
             return json.loads(data)
         except KeyError:
-            LOGGER.debug(f"JSON not found: {subpath}")
+            LOGGER.debug("JSON not found: %s", subpath)
             return {}
-        except Exception as e:
-            LOGGER.exception(f"{e}")
+        except Exception:
+            LOGGER.exception("Error while loading JSON")
             return {}
 
     def parts(self) -> List[str]:
@@ -188,7 +190,7 @@ def parse_url(
             return None
         else:
             return loc
-    except Exception as e:
-        LOGGER.warning(f"exception on parsing: {e} (stacktrace at DEBUG)")
+    except Exception:
+        LOGGER.warning("exception on parsing (stacktrace at DEBUG)")
         LOGGER.debug("stacktrace:", exc_info=True)
         return None
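
The parse_url hunk above keeps a one-line notice at WARNING and defers the stacktrace to DEBUG, while get_json's catch-all handler uses LOGGER.exception, which logs at ERROR and always attaches the traceback. A minimal sketch of the two patterns:

import logging

logging.basicConfig(level=logging.INFO)
LOGGER = logging.getLogger(__name__)

try:
    1 / 0
except ZeroDivisionError:
    # One-line notice; the full traceback appears only when DEBUG is enabled.
    LOGGER.warning("exception on parsing (stacktrace at DEBUG)")
    LOGGER.debug("stacktrace:", exc_info=True)

try:
    1 / 0
except ZeroDivisionError:
    # logger.exception logs at ERROR and always includes the traceback.
    LOGGER.exception("Error while loading JSON")
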
52 changes: 27 additions & 25 deletions ome_zarr/reader.py
@@ -129,7 +129,7 @@ def add(
         """
 
         if zarr in self.seen and not plate_labels:
-            LOGGER.debug(f"already seen {zarr}; stopping recursion")
+            LOGGER.debug("already seen %s; stopping recursion", zarr)
             return None
 
         if visibility is None:
@@ -169,7 +169,7 @@ def matches(zarr: ZarrLocation) -> bool:
     def __init__(self, node: Node) -> None:
         self.node = node
         self.zarr = node.zarr
-        LOGGER.debug(f"treating {self.zarr} as {self.__class__.__name__}")
+        LOGGER.debug("treating %s as %s", self.zarr, self.__class__.__name__)
         for k, v in self.zarr.root_attrs.items():
             LOGGER.info("root_attr: %s", k)
             LOGGER.debug(v)
@@ -217,12 +217,12 @@ def __init__(self, node: Node) -> None:
             # This is an ome mask, load the image
             parent_zarr = self.zarr.create(image)
             if parent_zarr.exists():
-                LOGGER.debug(f"delegating to parent image: {parent_zarr}")
+                LOGGER.debug("delegating to parent image: %s", parent_zarr)
                 node.add(parent_zarr, prepend=True, visibility=False)
             else:
                 parent_zarr = None
         if parent_zarr is None:
-            LOGGER.warn(f"no parent found for {self}: {image}")
+            LOGGER.warning("no parent found for %s: %s", self, image)
 
         # Metadata: TODO move to a class
         colors: Dict[Union[int, bool], List[float]] = {}
@@ -240,8 +240,8 @@ def __init__(self, node: Node) -> None:
                 else:
                     raise Exception("not bool or int")
 
-            except Exception as e:
-                LOGGER.error(f"invalid color - {color}: {e}")
+            except Exception:
+                LOGGER.exception("invalid color - %s", color)
 
         properties: Dict[int, Dict[str, str]] = {}
         props_list = image_label.get("properties", [])
@@ -296,8 +296,8 @@ def __init__(self, node: Node) -> None:
             if any(trans is not None for trans in transformations):
                 node.metadata["coordinateTransformations"] = transformations
             LOGGER.info("datasets %s", datasets)
-        except Exception as e:
-            LOGGER.error(f"failed to parse multiscale metadata: {e}")
+        except Exception:
+            LOGGER.exception("Failed to parse multiscale metadata")
             return  # EARLY EXIT
 
         for resolution in self.datasets:
@@ -350,7 +350,7 @@ def __init__(self, node: Node) -> None:
         try:
             len(channels)
         except Exception:
-            LOGGER.warn(f"error counting channels: {channels}")
+            LOGGER.warning("error counting channels: %s", channels)
             return  # EARLY EXIT
 
         colormaps = []
@@ -392,8 +392,8 @@ def __init__(self, node: Node) -> None:
                 node.metadata["contrast_limits"] = contrast_limits
                 node.metadata["colormap"] = colormaps
 
-        except Exception as e:
-            LOGGER.error(f"failed to parse metadata: {e}")
+        except Exception:
+            LOGGER.exception("Failed to parse metadata")
 
 
 class Well(Spec):
@@ -429,11 +429,11 @@ def get_field(tile_name: str, level: int) -> np.ndarray:
             row, col = (int(n) for n in tile_name.split(","))
             field_index = (column_count * row) + col
             path = f"{field_index}/{level}"
-            LOGGER.debug(f"LOADING tile... {path}")
+            LOGGER.debug("LOADING tile... %s", path)
             try:
                 data = self.zarr.load(path)
             except ValueError:
-                LOGGER.error(f"Failed to load {path}")
+                LOGGER.error("Failed to load %s", path)
                 data = np.zeros(self.img_pyramid_shapes[level], dtype=self.numpy_type)
             return data
 
@@ -446,7 +446,10 @@ def get_lazy_well(level: int, tile_shape: tuple) -> da.Array:
                 for col in range(column_count):
                     tile_name = f"{row},{col}"
                     LOGGER.debug(
-                        f"creating lazy_reader. row:{row} col:{col} level:{level}"
+                        "creating lazy_reader. row: %s col: %s level: %s",
+                        row,
+                        col,
+                        level,
                     )
                     lazy_tile = da.from_delayed(
                         lazy_reader(tile_name, level),
@@ -475,7 +478,7 @@ def matches(zarr: ZarrLocation) -> bool:
 
     def __init__(self, node: Node) -> None:
         super().__init__(node)
-        LOGGER.debug(f"Plate created with ZarrLocation fmt:{ self.zarr.fmt}")
+        LOGGER.debug("Plate created with ZarrLocation fmt: %s", self.zarr.fmt)
         self.get_pyramid_lazy(node)
 
     def get_pyramid_lazy(self, node: Node) -> None:
@@ -505,7 +508,7 @@ def get_pyramid_lazy(self, node: Node) -> None:
             raise Exception("Could not find first well")
         self.numpy_type = well_spec.numpy_type
 
-        LOGGER.debug(f"img_pyramid_shapes: {well_spec.img_pyramid_shapes}")
+        LOGGER.debug("img_pyramid_shapes: %s", well_spec.img_pyramid_shapes)
 
         self.axes = well_spec.img_metadata["axes"]
 
@@ -533,19 +536,18 @@ def get_tile_path(self, level: int, row: int, col: int) -> str:
         )
 
     def get_stitched_grid(self, level: int, tile_shape: tuple) -> da.core.Array:
-        LOGGER.debug(f"get_stitched_grid() level: {level}, tile_shape: {tile_shape}")
+        LOGGER.debug("get_stitched_grid() level: %s, tile_shape: %s", level, tile_shape)
 
         def get_tile(tile_name: str) -> np.ndarray:
             """tile_name is 'level,z,c,t,row,col'"""
             row, col = (int(n) for n in tile_name.split(","))
             path = self.get_tile_path(level, row, col)
-            LOGGER.debug(f"LOADING tile... {path} with shape: {tile_shape}")
+            LOGGER.debug("LOADING tile... %s with shape: %s", path, tile_shape)
 
             try:
                 data = self.zarr.load(path)
-            except ValueError as e:
-                LOGGER.error(f"Failed to load {path}")
-                LOGGER.debug(f"{e}")
+            except ValueError:
+                LOGGER.exception("Failed to load %s", path)
                 data = np.zeros(tile_shape, dtype=self.numpy_type)
             return data
 
@@ -627,28 +629,28 @@ def __call__(self) -> Iterator[Node]:
         node = Node(self.zarr, self)
         if node.specs:  # Something has matched
 
-            LOGGER.debug(f"treating {self.zarr} as ome-zarr")
+            LOGGER.debug("treating %s as ome-zarr", self.zarr)
             yield from self.descend(node)
 
             # TODO: API thoughts for the Spec type
             # - ask for recursion or not
            # - ask for "provides data", "overrides data"
 
         elif self.zarr.zarray:  # Nothing has matched
-            LOGGER.debug(f"treating {self.zarr} as raw zarr")
+            LOGGER.debug("treating %s as raw zarr", self.zarr)
             node.data.append(self.zarr.load())
             yield node
 
         else:
-            LOGGER.debug(f"ignoring {self.zarr}")
+            LOGGER.debug("ignoring %s", self.zarr)
             # yield nothing
 
     def descend(self, node: Node, depth: int = 0) -> Iterator[Node]:
 
         for pre_node in node.pre_nodes:
             yield from self.descend(pre_node, depth + 1)
 
-        LOGGER.debug(f"returning {node}")
+        LOGGER.debug("returning %s", node)
         yield node
 
         for post_node in node.post_nodes:
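For context on the Well and Plate hunks: get_field and get_tile are wrapped in dask.delayed elsewhere in this module, so the tile grid is assembled lazily and no chunk is read until compute time. A self-contained sketch of that pattern, with a dummy loader and an assumed 2x2 grid and tile shape standing in for the real readers:

import dask
import dask.array as da
import numpy as np

tile_shape = (256, 256)  # assumed tile size for this sketch

def load_tile(tile_name: str) -> np.ndarray:
    """Hypothetical stand-in for Well.get_field: reads one field of view."""
    return np.zeros(tile_shape, dtype=np.uint8)

lazy_reader = dask.delayed(load_tile)

lazy_rows = []
for row in range(2):
    lazy_row = []
    for col in range(2):
        tile = da.from_delayed(
            lazy_reader(f"{row},{col}"), shape=tile_shape, dtype=np.uint8
        )
        lazy_row.append(tile)
    lazy_rows.append(da.concatenate(lazy_row, axis=1))
mosaic = da.concatenate(lazy_rows, axis=0)

print(mosaic.shape)  # (512, 512); nothing is read until .compute()
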
2 changes: 1 addition & 1 deletion ome_zarr/utils.py
@@ -93,7 +93,7 @@ def download(input_path: str, output_dir: str = ".") -> None:
         if datasets and resolutions:
             pbar = ProgressBar()
             for dataset, data in reversed(list(zip(datasets, resolutions))):
-                LOGGER.info(f"resolution {dataset}...")
+                LOGGER.info("resolution %s...", dataset)
                 with pbar:
                     data.to_zarr(str(target_path / dataset))
         else:
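The download loop above writes each resolution inside dask's ProgressBar context, so chunk-by-chunk progress is reported while the graph executes. A minimal sketch with a synthetic array (the target path is illustrative):

import dask.array as da
from dask.diagnostics import ProgressBar

# Synthetic stand-in for one resolution level of an image pyramid.
data = da.zeros((4, 1024, 1024), chunks=(1, 256, 256))

with ProgressBar():  # prints progress as chunks are computed and stored
    data.to_zarr("output.zarr/0")  # hypothetical output location
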
4 changes: 2 additions & 2 deletions ome_zarr/writer.py
@@ -37,10 +37,10 @@ def _get_valid_axes(
     if axes is None:
         if ndim == 2:
             axes = ["y", "x"]
-            LOGGER.info("Auto using axes %s for 2D data" % axes)
+            LOGGER.info("Auto using axes %s for 2D data", axes)
         elif ndim == 5:
             axes = ["t", "c", "z", "y", "x"]
-            LOGGER.info("Auto using axes %s for 5D data" % axes)
+            LOGGER.info("Auto using axes %s for 5D data", axes)
         else:
             raise ValueError(
                 "axes must be provided. Can't be guessed for 3D or 4D data"
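The writer hunk touches the axis-guessing fallback, which fills in axes only for the two unambiguous ranks. A condensed, standalone rendering of that rule (not the library's full validation):

from typing import List, Optional

def guess_axes(ndim: int, axes: Optional[List[str]] = None) -> List[str]:
    """Condensed sketch of the fallback logic in _get_valid_axes."""
    if axes is not None:
        return axes
    if ndim == 2:
        return ["y", "x"]
    if ndim == 5:
        return ["t", "c", "z", "y", "x"]
    raise ValueError("axes must be provided. Can't be guessed for 3D or 4D data")

print(guess_axes(2))  # ['y', 'x']
print(guess_axes(5))  # ['t', 'c', 'z', 'y', 'x']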
