fix: Fix warnings reported by Deepsource #846

Merged: 11 commits, Dec 16, 2022
2 changes: 1 addition & 1 deletion package/PartSeg/_roi_analysis/batch_window.py
@@ -65,7 +65,7 @@ def save(
range_changed=None,
step_changed=None,
):
pass
"""empty function to satisfy interface"""

@classmethod
def get_name(cls) -> str:
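Several files in this PR make the same change: a stub method whose body was a bare `pass` now carries a one-line docstring, which documents why the method is intentionally empty and quiets the static-analysis warning. A minimal sketch of the idea (the class below is a made-up stand-in, not PartSeg's actual save backend):

```python
class SaveStub:  # hypothetical stand-in, not PartSeg's real SaveBase class
    def save(self, path, range_changed=None, step_changed=None):
        """Empty hook kept only to satisfy the save-backend interface."""

    @classmethod
    def get_name(cls) -> str:
        return "stub"


# A docstring-only body is valid Python: the docstring *is* the function body,
# so no bare `pass` statement is needed.
assert SaveStub().save("out.tif") is None
assert SaveStub.get_name() == "stub"
```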
2 changes: 1 addition & 1 deletion package/PartSeg/_roi_analysis/prepare_plan_widget.py
@@ -1047,7 +1047,7 @@ def update_view(self, reset=False):
QTreeWidgetItem(child, [line])

else:
logging.error(f"Unknown operation {op_type}")
logging.error("Unknown operation %s", op_type) # pragma: no cover
self.blockSignals(False)
self.set_path()
self.changed_node.emit()
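Most of the diff applies the same logging fix: pass the values as extra arguments so `logging` interpolates them lazily, instead of building the message eagerly with an f-string. Pylint flags the eager form as `logging-fstring-interpolation` (W1203) and DeepSource reports the same pattern; the added `# pragma: no cover` markers match coverage.py's default exclusion regex, so these defensive branches no longer count against test coverage. A small sketch of why laziness matters (names are illustrative):

```python
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("demo")


class Expensive:
    def __repr__(self) -> str:
        print("repr computed")  # visible side effect to show when repr runs
        return "<Expensive>"


value = Expensive()

# Eager f-string: the message (and __repr__) is built before logging even
# checks the level, so "repr computed" prints although DEBUG is disabled.
log.debug(f"value is {value!r}")

# Lazy %-style: logging formats only if a handler will emit the record,
# so __repr__ is never called here.
log.debug("value is %r", value)
```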
2 changes: 1 addition & 1 deletion package/PartSeg/common_gui/algorithms_description.py
@@ -122,7 +122,7 @@ def set_value(self, val):
try:
return self._setter(self._widget, val)
except (TypeError, ValueError) as e:
logging.error(f"Error {e} setting value {val} to {self.name}")
logging.error("Error %s setting value %s to %s", e, val, self.name)

def get_field(self) -> QWidget:
"""
2 changes: 1 addition & 1 deletion package/PartSeg/common_gui/flow_layout.py
@@ -73,7 +73,7 @@ def addLayout(self, layout):
self.itemList.append(layout)

def addStretch(self):
pass
"""To satisfy the QLayout API"""

def count(self):
return len(self.itemList)
4 changes: 2 additions & 2 deletions package/PartSeg/common_gui/napari_image_view.py
@@ -1031,8 +1031,8 @@ def _calc_layer_filter(layer: NapariImage, filter_type: NoiseFilterType, radius:


def _print_dict(dkt: MutableMapping, indent="") -> str:
if not isinstance(dkt, MutableMapping):
logging.error(f"{type(dkt)} instead of dict passed to _print_dict")
if not isinstance(dkt, MutableMapping): # pragma: no cover
logging.error("%s instead of dict passed to _print_dict", type(dkt))
return indent + str(dkt)
res = []
for k, v in dkt.items():
3 changes: 1 addition & 2 deletions package/PartSeg/plugins/old_partseg/old_partseg.py
@@ -38,8 +38,7 @@ def _load(cls, tar_file: tarfile.TarFile, file_path: str) -> ProjectTuple:
seg_array = np.load(res_buffer)
except KeyError:
seg_array = None
algorithm_str = tar_file.extractfile("data.json").read()
algorithm_dict = json.loads(algorithm_str)
algorithm_dict = json.load(tar_file.extractfile("data.json"))
spacing = np.array(algorithm_dict["spacing"][::-1]) / UNIT_SCALE[Units.nm.value]
image = Image(image_arr.reshape((1,) + image_arr.shape + (1,)), spacing, file_path, axes_order="TZYXC")
values = {
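The `old_partseg.py` change collapses a `read()` plus `json.loads` into a single `json.load` call on the file-like object returned by `tarfile.extractfile`. A self-contained sketch that builds a throwaway archive in memory (the `spacing` payload is made up for illustration):

```python
import io
import json
import tarfile

# Build a small in-memory tar archive containing data.json.
payload = json.dumps({"spacing": [1.0, 0.5, 0.5]}).encode()
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode="w") as tar:
    info = tarfile.TarInfo("data.json")
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))
buf.seek(0)

with tarfile.open(fileobj=buf, mode="r") as tar:
    # json.load accepts any object with a .read() method, so the intermediate
    # bytes variable from the old code is unnecessary.
    data = json.load(tar.extractfile("data.json"))

print(data["spacing"])
```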
@@ -476,7 +476,7 @@ def set_number_of_workers(self, val: int):

:param int val: number of workers.
"""
logging.debug(f"Number off process {val}")
logging.debug("Number off process %s", val)
self.batch_manager.set_number_of_process(val)

def get_results(self) -> BatchResultDescription:
@@ -764,7 +764,7 @@ def wrote_data_to_file(self):
if i == 100: # pragma: no cover
raise PermissionError(f"Fail to write result excel {self.file_path}")
except Exception as e: # pragma: no cover # pylint: disable=W0703
logging.error(f"[batch_backend] {e}")
logging.error("[batch_backend] %s", e)
self.error_queue.put(prepare_error_data(e))
finally:
self.writing = False
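When the logged value is an exception caught in an `except` block, the same lazy-formatting rule applies. A related option, not what this PR does but a common companion pattern, is `logging.exception`, which also records the traceback. A sketch with an invented writer function:

```python
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("batch")


def write_results(path: str) -> None:
    raise PermissionError(f"Fail to write result excel {path}")


try:
    write_results("/tmp/report.xlsx")
except Exception as e:  # pylint: disable=W0703
    # (the broad handler mirrors the one in the diff)
    # Lazy formatting, as in the PR:
    log.error("[batch_backend] %s", e)
    # Alternative that also captures the traceback; it must be called
    # from inside an except block:
    log.exception("[batch_backend] failed to write results")
```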
21 changes: 12 additions & 9 deletions package/PartSegCore/analysis/batch_processing/parallel_backend.py
@@ -159,11 +159,11 @@ def cancel_work(self, global_parameters):
del self.calculation_dict[global_parameters.uuid]

def join_all(self):
logging.debug(f"Join begin {len(self.process_list)} {self.number_off_process}")
logging.debug("Join begin %s %s", len(self.process_list), self.number_off_process)
with self.locker:
if len(self.process_list) > self.number_off_process:
to_remove = []
logging.debug(f"Process list start {self.process_list}")
logging.debug("Process list start %s", self.process_list)
for p in self.process_list:
if not p.is_alive():
p.join()
@@ -172,12 +172,15 @@ def join_all(self):
for p in to_remove:
self.process_list.remove(p)
self.number_off_alive_process -= len(to_remove)
logging.debug(f"Process list end {self.process_list}")
logging.debug("Process list end %s", self.process_list)
# FIXME self.number_off_alive_process, self.number_off_process negative values
if len(self.process_list) > self.number_off_process and len(self.process_list) > 0:
logging.info(
f"Wait on process, time {time.time()}, {self.number_off_alive_process},"
f" {len(self.process_list)}, {self.number_off_process}"
"Wait on process, time %s, %s, %s, %s",
time.time(),
self.number_off_alive_process,
len(self.process_list),
self.number_off_process,
)

Timer(1, self.join_all).start()
@@ -235,12 +238,12 @@ def calculate_task(self, val: Tuple[Any, uuid.UUID]):

def run(self):
"""Worker main loop"""
logging.debug(f"Process started {os.getpid()}")
logging.debug("Process started %s", os.getpid())
while True:
if not self.order_queue.empty():
with suppress(Empty):
order = self.order_queue.get_nowait()
logging.debug(f"Order message: {order}")
logging.debug("Order message: %s", order)
if order == SubprocessOrder.kill:
break
if not self.task_queue.empty():
@@ -253,10 +256,10 @@ def run(self):
except (MemoryError, OSError): # pragma: no cover
pass
except Exception as ex: # pragma: no cover # pylint: disable=W0703
logging.warning(f"Unsupported exception {ex}")
logging.warning("Unsupported exception %s", ex)
else:
time.sleep(0.1)
logging.info(f"Process {os.getpid()} ended")
logging.info("Process %s ended", os.getpid())


def spawn_worker(task_queue: Queue, order_queue: Queue, result_queue: Queue, calculation_dict: Dict[uuid.UUID, Any]):
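The `join_all` change passes each value as a separate placeholder instead of assembling one f-string. Worth noting, although it is not part of this PR: %-style laziness only defers formatting, not argument evaluation, so when even computing an argument is expensive, the standard companion is an explicit `isEnabledFor` guard. A sketch with an invented summary helper:

```python
import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("parallel")


def expensive_summary(processes):
    # Pretend this inspects every worker and builds a long report.
    return ", ".join(repr(p) for p in processes)


process_list = []

# Multiple placeholders keep the formatting lazy, like the join_all change.
log.debug("Process list %s, alive %s", process_list, len(process_list))

# Guard the call when building the argument itself is costly; the summary
# below is never computed because DEBUG is disabled.
if log.isEnabledFor(logging.DEBUG):
    log.debug("Detailed state: %s", expensive_summary(process_list))
```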
2 changes: 1 addition & 1 deletion package/PartSegCore/analysis/calculate_pipeline.py
@@ -15,7 +15,7 @@


def _empty_fun(_a1, _a2):
pass
"""Empty function for calculation run callback"""


@dataclass(frozen=True)
10 changes: 5 additions & 5 deletions package/PartSegCore/analysis/calculation_plan.py
@@ -255,16 +255,16 @@ def set_map_path(self, value):
self.path_to_file = value

def parse_map(self, sep=";"):
if not os.path.exists(self.path_to_file):
logging.error(f"File does not exists: {self.path_to_file}")
if not os.path.exists(self.path_to_file): # pragma: no cover
logging.error("File does not exists: %s", self.path_to_file)
raise ValueError(f"File for mapping mask does not exists: {self.path_to_file}")
with open(self.path_to_file, encoding="utf-8") as map_file:
dir_name = os.path.dirname(self.path_to_file)
for i, line in enumerate(map_file):
try:
file_name, mask_name = line.split(sep)
except ValueError:
logging.error(f"Error in parsing map file\nline {i}\n{line}\nfrom file{self.path_to_file}")
except ValueError: # pragma: no cover
logging.error("Error in parsing map file\nline %s\n%s\nfrom file %s", i, line, self.path_to_file)
continue
file_name = file_name.strip()
mask_name = mask_name.strip()
@@ -710,7 +710,7 @@ def get_el_name(el): # noqa C901
:param el: Plan element
:return: str
"""
if el.__class__.__name__ not in CalculationPlan.correct_name.keys():
if el.__class__.__name__ not in CalculationPlan.correct_name:
print(el, el.__class__.__name__, file=sys.stderr)
raise ValueError(f"Unknown type {el.__class__.__name__}")
if isinstance(el, RootType):
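The `get_el_name` change drops a redundant `.keys()` call: the `in` operator on a dict already performs a key lookup, so building the keys view adds nothing. A tiny illustration with made-up mapping contents:

```python
correct_name = {"RootType": "Root", "MaskCreate": "Mask creation"}

name = "RootType"

# Redundant: .keys() creates a view object just to run the same hash lookup.
if name in correct_name.keys():
    print("found (verbose form)")

# Idiomatic: membership on a dict already tests its keys.
if name in correct_name:
    print("found")
```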
22 changes: 10 additions & 12 deletions package/PartSegCore/class_generator.py
@@ -187,8 +187,7 @@ def add_classes(types_list, translate_dict, global_state):
continue
if hasattr(type_, "__module__") and type_.__module__ == "typing":
if hasattr(type_, "__args__") and isinstance(type_.__args__, collections.abc.Iterable):
sub_types = [x for x in type_.__args__ if not isinstance(x, omit_list)]
if sub_types:
if sub_types := [x for x in type_.__args__ if not isinstance(x, omit_list)]:
add_classes(sub_types, translate_dict, global_state)
if type_._name is None: # pylint: disable=W0212
type_str = str(type_.__origin__)
@@ -251,8 +250,7 @@ def _make_class(typename, types, defaults_dict, base_classes, readonly):
field_definitions = ""
init_sig = [f"self.{f_name} = {v_name}" for f_name, v_name in zip(slots, type_dict.keys())]
tuple_list = [f"self.{name_}" for name_ in slots]
init_content = "\n ".join(init_sig)
init_content += "\n self.__post_init__()"
init_content = "\n ".join(init_sig) + "\n self.__post_init__()"
class_definition = _class_template.format(
imports="\n".join(import_set),
typename=typename,
@@ -268,7 +266,7 @@ def _make_class(typename, types, defaults_dict, base_classes, readonly):
base_classes=", ".join(translate_dict[x] for x in base_classes),
)

global_state["__name__"] = "serialize_%s" % typename
global_state["__name__"] = f"serialize_{typename}"
try:
# pylint: disable=W0122
exec(class_definition, global_state) # nosec
@@ -291,9 +289,9 @@


class BaseMeta(type):
def __new__(mcs, name, bases, attrs):
def __new__(cls, name, bases, attrs):
if attrs.get("_root", False):
return super().__new__(mcs, name, bases, attrs)
return super().__new__(cls, name, bases, attrs)
warnings.warn(
"BaseSerializableClass is deprecated, use pydantic.BaseModel instead", FutureWarning, stacklevel=2
)
@@ -352,23 +350,23 @@ class BaseSerializableClass(metaclass=BaseMeta):
__old_names__ = ()

def __init__(self, *args, **kwargs):
pass
"""declare interface"""

def __post_init__(self):
pass
"""declare interface"""

def asdict(self) -> collections.OrderedDict:
pass
"""declare interface"""

def replace_(self, **_kwargs):
return self

def as_tuple(self) -> typing.Tuple:
pass
"""declare interface"""

@classmethod
def make_(cls, iterable):
pass
"""declare interface"""


class SerializeClassEncoder(json.JSONEncoder):
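Two further cleanups appear in `class_generator.py`: an assignment expression (`:=`, Python 3.8+) folds the list build and the truthiness check into one statement, and an f-string replaces `%`-interpolation for an ordinary, non-logging string. A sketch with simplified inputs (the filter functions are illustrative, not the real `add_classes` logic):

```python
omit_list = (type(None),)


def filter_args_old(args):
    # Pre-3.8 style: assign, then test.
    sub_types = [x for x in args if not isinstance(x, omit_list)]
    if sub_types:
        return sub_types
    return []


def filter_args_new(args):
    # Assignment expression: bind and test in a single line.
    if sub_types := [x for x in args if not isinstance(x, omit_list)]:
        return sub_types
    return []


typename = "ProjectTuple"
# f-strings are preferred over %-interpolation outside logging calls.
assert f"serialize_{typename}" == "serialize_%s" % typename
assert filter_args_old([int, None, str]) == filter_args_new([int, None, str]) == [int, str]
```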