Add nice widgets callbacks #149

Merged 16 commits on Apr 14, 2023
Changes from all commits
30 changes: 15 additions & 15 deletions plantseg/resources/models_zoo.yaml
@@ -8,7 +8,7 @@ generic_confocal_3D_unet:
description: "Unet trained on confocal images of Arabidopsis Ovules on 1/2-resolution in XY with BCEDiceLoss."
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [80, 170, 170]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"


@@ -18,7 +18,7 @@ generic_light_sheet_3D_unet:
description: "Unet trained on light-sheet images of Arabidopsis Lateral Root Primordia on original resolution with BCEDiceLoss."
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

## Ovules
@@ -28,7 +28,7 @@ confocal_3D_unet_ovules_ds1x:
description: "Unet trained on confocal images of Arabidopsis Ovules on original resolution with BCEDiceLoss."
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

confocal_3D_unet_ovules_ds2x:
@@ -37,7 +37,7 @@ confocal_3D_unet_ovules_ds2x:
description: "Unet trained on confocal images of Arabidopsis Ovules on 1/2-resolution in XY with BCEDiceLoss."
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

confocal_3D_unet_ovules_ds3x:
@@ -46,7 +46,7 @@ confocal_3D_unet_ovules_ds3x:
description: "Unet trained on confocal images of Arabidopsis Ovules on 1/3-resolution in XY with BCEDiceLoss."
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

confocal_2D_unet_ovules_ds2x:
@@ -55,7 +55,7 @@ confocal_2D_unet_ovules_ds2x:
description: "2D Unet trained on z-slices of confocal images of Arabidopsis Ovules (1/2-resolution in XY) with BCEDiceLoss."
dimensionality: "2D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 1, 256, 256 ]
output_type: "boundaries"

confocal_3D_unet_ovules_nuclei_ds1x:
@@ -64,7 +64,7 @@ confocal_3D_unet_ovules_nuclei_ds1x:
description: "Unet trained on confocal images of Arabidopsis Ovules nuclei stain on original resolution with BCEDiceLoss. The network predicts 1 channel: nuclei probability maps"
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "nuclei"

## Root
@@ -74,7 +74,7 @@ lightsheet_3D_unet_root_ds1x:
description: "Unet trained on light-sheet images of Lateral Root Primordia on original resolution with BCEDiceLoss."
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

lightsheet_3D_unet_root_ds2x:
@@ -83,7 +83,7 @@ lightsheet_3D_unet_root_ds2x:
description: "Unet trained on light-sheet images of Lateral Root Primordia on 1/2-resolution in XY with BCEDiceLoss."
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

lightsheet_3D_unet_root_ds3x:
@@ -92,7 +92,7 @@ lightsheet_3D_unet_root_ds3x:
description: "Unet trained on light-sheet images of Lateral Root Primordia on 1/3-resolution in XY with BCEDiceLoss."
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

lightsheet_2D_unet_root_ds1x:
@@ -101,7 +101,7 @@ lightsheet_2D_unet_root_ds1x:
description: "2D Unet trained on z-slices of light-sheet images of Lateral Root Primordia on original resolution with BCEDiceLoss."
dimensionality: "2D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 1, 256, 256 ]
output_type: "boundaries"

lightsheet_3D_unet_root_nuclei_ds1x:
@@ -110,7 +110,7 @@ lightsheet_3D_unet_root_nuclei_ds1x:
description: "Unet trained on light-sheet images of Lateral Root Primordia nuclei on original resolution with BCEDiceLoss. The network predicts 2 channels: nuclei mask in the 1st channel, nuclei boundaries in the 2nd channel"
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "nuclei"

lightsheet_2D_unet_root_nuclei_ds1x:
@@ -119,7 +119,7 @@ lightsheet_2D_unet_root_nuclei_ds1x:
description: "2D Unet trained on z-slices of light-sheet images of Lateral Root Primordia nuclei on original resolution with BCEDiceLoss. The network predicts 2 channels: nuclei mask in the 1st channel, nuclei boundaries in the 2nd channel"
dimensionality: "2D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 1, 256, 256 ]
output_type: "nuclei"

# PNAS
@@ -138,7 +138,7 @@ confocal_3D_unet_sa_meristem_cells:
description: "3D Unet trained on confocal images of Arabidopsis thaliana apical stem cell: https://www.repository.cam.ac.uk/handle/1810/262530"
dimensionality: "3D"
modality: "confocal"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

# Mouse embryo ex vivo
@@ -148,7 +148,7 @@ lightsheet_3D_unet_mouse_embryo_cells:
description: "A a variant of 3D U-Net trained to predict the cell boundaries in live light-sheet images of ex-vivo developing mouse embryo. Voxel size: (0.2×0.2×1 µm^3) (XYZ)"
dimensionality: "3D"
modality: "light-sheet"
-  recommended_patch_size: [ 80, 170, 170 ]
+  recommended_patch_size: [ 80, 160, 160 ]
output_type: "boundaries"

confocal_3D_unet_mouse_embryo_nuclei:
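
Every patch size touched in this file changes from [80, 170, 170] to [80, 160, 160] for the 3D models and to [1, 256, 256] for the 2D models. A plausible motivation (an assumption, not stated in the PR) is that 160 and 256 are multiples of the U-Net downsampling factor, while 170 is not. The snippet below is only a sketch under that assumption: it loads the zoo file with PyYAML and flags any recommended_patch_size that is not divisible by a hypothetical factor of 16.

# Sketch only: check every recommended_patch_size in the zoo against a
# hypothetical U-Net downsampling factor of 16 (2**4, i.e. four pooling levels).
# The factor and the script itself are assumptions, not part of the PR.
import yaml

DOWNSCALE_FACTOR = 16

with open("plantseg/resources/models_zoo.yaml") as f:
    zoo = yaml.safe_load(f)

for name, spec in zoo.items():
    patch = spec.get("recommended_patch_size", [])
    # A size of 1 (the z-axis of the 2D models) is always compatible.
    bad = [dim for dim in patch if dim != 1 and dim % DOWNSCALE_FACTOR != 0]
    if bad:
        print(f"{name}: {patch} contains sizes not divisible by {DOWNSCALE_FACTOR}: {bad}")
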
2 changes: 2 additions & 0 deletions plantseg/run_plantseg.py
@@ -15,6 +15,8 @@ def parser():


def main():
    from plantseg.utils import check_version
    check_version()
Collaborator: It would be good to surround this with a try-except clause in case there is a problem with GitHub or the network. We should not fail if an external service is down.

Collaborator (Author): Fixed it.

    args = parser()

    if args.gui:
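
The review exchange above asks that the startup version check never crash PlantSeg when GitHub or the network is unavailable; the merged fix handles the errors inside check_version itself (see plantseg/utils.py below). Purely for comparison, a call-site guard could look like this hedged sketch; safe_check_version is a hypothetical helper, not part of the PR.

# Hypothetical alternative (not what the PR does): guard the call site so that
# any failure of the version check is downgraded to a warning at startup.
from warnings import warn


def safe_check_version():
    try:
        from plantseg.utils import check_version
        check_version()
    except Exception as e:
        warn(f"Could not check for a new PlantSeg version: {e}")
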
35 changes: 35 additions & 0 deletions plantseg/utils.py
@@ -4,11 +4,13 @@
from pathlib import Path
from shutil import copy2
from typing import Tuple, Optional
from warnings import warn

import requests
import yaml

from plantseg import model_zoo_path, custom_zoo, home_path, PLANTSEG_MODELS_DIR, plantseg_global_path
from plantseg.__version__ import __version__ as current_version
from plantseg.pipeline import gui_logger

CONFIG_TRAIN_YAML = "config_train.yml"
@@ -276,3 +278,36 @@ def clean_models():

else:
print("Invalid input, please type 'y' or 'n'.")


def check_version(plantseg_url=' https://api.github.com/repos/hci-unihd/plant-seg/releases/latest'):
    try:
        response = requests.get(plantseg_url).json()
        latest_version = response['tag_name']

    except requests.exceptions.ConnectionError:
        warn("Connection error, could not check for new version.")
        return None
    except requests.exceptions.Timeout:
        warn("Connection timeout, could not check for new version.")
        return None
    except requests.exceptions.TooManyRedirects:
        warn("Too many redirects, could not check for new version.")
        return None
    except Exception as e:
        warn(f"Unknown error, could not check for new version. Error: {e}")
        return None

    latest_version_numeric = [int(x) for x in latest_version.split(".")]
    plantseg_version_numeric = [int(x) for x in current_version.split(".")]

    if len(latest_version_numeric) != len(plantseg_version_numeric):
        warn(f"Could not check for new version, version number not in the correct format.\n"
             f"Current version: {current_version}, latest version: {latest_version}")
        return None

    for l_v, p_v in zip(latest_version_numeric, plantseg_version_numeric):
        if l_v > p_v:
            print(f"New version of PlantSeg available: {latest_version}.\n"
                  f"Please update your version to the latest one!")
            return None
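
Two limitations of the comparison above are worth keeping in mind: a tag that is not purely numeric (for example '1.4.0rc1') makes the int() conversion raise outside the try block, and the component-wise loop does not stop at the first smaller component, so a latest release of 1.2.9 would be reported as newer than a current 1.3.0. A more robust comparison, shown here only as a sketch and not part of the PR, could delegate the parsing to packaging.version:

# Sketch only (not part of the PR): compare version strings with packaging,
# which handles pre-releases and tags with a different number of components.
from warnings import warn

from packaging.version import InvalidVersion, Version


def is_newer(latest: str, current: str) -> bool:
    try:
        return Version(latest.lstrip("v")) > Version(current.lstrip("v"))
    except InvalidVersion as e:
        warn(f"Could not compare versions '{latest}' and '{current}': {e}")
        return False
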
8 changes: 5 additions & 3 deletions plantseg/viewer/headless.py
@@ -10,10 +10,12 @@
from plantseg.viewer.dag_handler import DagHandler
from plantseg.viewer.widget.predictions import ALL_DEVICES, ALL_CUDA_DEVICES

-all_gpus_str = f'all {len(ALL_CUDA_DEVICES)} gpus'
+all_gpus_str = f'all gpus: {len(ALL_CUDA_DEVICES)}'
ALL_GPUS = [all_gpus_str] if len(ALL_CUDA_DEVICES) > 0 else []
ALL_DEVICES_HEADLESS = ALL_DEVICES + ALL_GPUS

MAX_WORKERS = len(ALL_CUDA_DEVICES) if len(ALL_CUDA_DEVICES) > 0 else multiprocessing.cpu_count()


def _parse_input_paths(inputs, path_suffix='_path'):
list_input_paths = [_input for _input in inputs if _input[-len(path_suffix):] == path_suffix]
@@ -39,7 +41,7 @@ def run_workflow_headless(path):
'widget_type': 'IntSlider',
'tooltip': 'Define the size of the gaussian smoothing kernel. '
'The larger the more blurred will be the output image.',
-'max': multiprocessing.cpu_count(), 'min': 1},
+'max': MAX_WORKERS, 'min': 1},
scheduler={'label': 'Scheduler',
'choices': ['multiprocessing', 'threaded']
},
Expand All @@ -48,7 +50,7 @@ def run_workflow_headless(path):
def run(list_inputs: input_hints,
out_directory: Path = Path.home(),
device: str = ALL_DEVICES_HEADLESS[0],
-num_workers: int = 1,
+num_workers: int = MAX_WORKERS,
scheduler: str = 'multiprocessing'):
dict_of_jobs = {}
cluster = distributed.LocalCluster(n_workers=num_workers, threads_per_worker=1)
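
With this change the headless runner both defaults and caps num_workers at MAX_WORKERS: one dask worker per CUDA device when GPUs are present, otherwise one per CPU core. A minimal standalone sketch of the same fallback is shown below; rebuilding ALL_CUDA_DEVICES via torch is an assumption used only to keep the example self-contained, while the LocalCluster call mirrors the diff.

# Sketch of the MAX_WORKERS fallback used in headless.py. The torch-based
# device listing is an assumption; the single-threaded-worker cluster mirrors
# run_workflow_headless above.
import multiprocessing

import torch
from dask.distributed import LocalCluster

ALL_CUDA_DEVICES = [f"cuda:{i}" for i in range(torch.cuda.device_count())]
MAX_WORKERS = len(ALL_CUDA_DEVICES) if len(ALL_CUDA_DEVICES) > 0 else multiprocessing.cpu_count()

if __name__ == "__main__":
    cluster = LocalCluster(n_workers=MAX_WORKERS, threads_per_worker=1)
    print(f"Started a local cluster with {MAX_WORKERS} single-threaded workers.")
    print(cluster)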