Commit message:

* add array utils
* add dataloader
* change no overlap, display progress
* add numpy array experiment to enum
* precommit
* add dataloader creation function
* add array configs
* add test
* update docs
* remove prediction saving callback
* remove aux section
* update docstring
* update docs
* precommit

Co-authored-by: Benjamin Morris <[email protected]>
1 parent 8e05a63 · commit 7668cac · 21 changed files with 359 additions and 23 deletions.
@@ -0,0 +1,16 @@
_target_: cyto_dl.datamodules.array.make_array_dataloader
data:
num_workers: 1
batch_size: 1
source_key: ${source_col}
transforms:
  - _target_: monai.transforms.ToTensord
    keys:
      - ${source_col}
  - _target_: cyto_dl.image.transforms.clip.Clipd
    keys:
      - ${source_col}
  - _target_: monai.transforms.NormalizeIntensityd
    channel_wise: true
    keys:
      - ${source_col}
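For orientation, this new prediction config simply wires a small MONAI transform chain into the make_array_dataloader function added later in this commit; the empty data: field is filled in at runtime. Below is a minimal sketch of the equivalent direct call, assuming ${source_col} resolves to "raw" (a placeholder value, not taken from this config):

import numpy as np
from monai.transforms import NormalizeIntensityd, ToTensord

from cyto_dl.datamodules.array import make_array_dataloader
from cyto_dl.image.transforms.clip import Clipd

source_col = "raw"  # placeholder; in the config this comes from the ${source_col} interpolation

# A single-channel 3D image standing in for the runtime `data:` value.
image = np.random.rand(1, 16, 32, 32).astype(np.float32)

loader = make_array_dataloader(
    data=image,  # a bare array is wrapped as {source_col: image}
    transforms=[
        ToTensord(keys=[source_col]),
        Clipd(keys=[source_col]),
        NormalizeIntensityd(keys=[source_col], channel_wise=True),
    ],
    source_key=source_col,
    num_workers=1,
    batch_size=1,
)
batch = next(iter(loader))  # dict holding a (1, 1, 16, 32, 32) tensor under "raw"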
@@ -0,0 +1,79 @@
_aux:
  patch_shape:
  _scales_dict:
    - - ${target_col}
      - [1]
    - - ${source_col}
      - [1]

train_dataloaders:
  _target_: cyto_dl.datamodules.array.make_array_dataloader
  data:
  num_workers: 0
  batch_size: 1
  source_key: ${source_col}
  transforms:
    - _target_: monai.transforms.ToTensord
      keys:
        - ${source_col}
        - ${target_col}
    - _target_: cyto_dl.image.transforms.clip.Clipd
      keys: ${source_col}
    - _target_: monai.transforms.NormalizeIntensityd
      keys: ${source_col}
      channel_wise: true
    - _target_: monai.transforms.ThresholdIntensityd
      keys: ${target_col}
      threshold: 0.1
      above: False
      cval: 1
    - _target_: cyto_dl.image.transforms.RandomMultiScaleCropd
      keys:
        - ${source_col}
        - ${target_col}
      patch_shape: ${data._aux.patch_shape}
      patch_per_image: 1
      scales_dict: ${kv_to_dict:${data._aux._scales_dict}}
    - _target_: monai.transforms.RandHistogramShiftd
      prob: 0.1
      keys: ${source_col}
      num_control_points: [90, 500]

    - _target_: monai.transforms.RandStdShiftIntensityd
      prob: 0.1
      keys: ${source_col}
      factors: 0.1

    - _target_: monai.transforms.RandAdjustContrastd
      prob: 0.1
      keys: ${source_col}
      gamma: [0.9, 1.5]

val_dataloaders:
  _target_: cyto_dl.datamodules.array.make_array_dataloader
  data:
  num_workers: 0
  batch_size: 1
  source_key: ${source_col}
  transforms:
    - _target_: monai.transforms.ToTensord
      keys:
        - ${source_col}
        - ${target_col}
    - _target_: cyto_dl.image.transforms.clip.Clipd
      keys: ${source_col}
    - _target_: monai.transforms.NormalizeIntensityd
      keys: ${source_col}
      channel_wise: true
    - _target_: monai.transforms.ThresholdIntensityd
      keys: ${target_col}
      threshold: 0.1
      above: False
      cval: 1
    - _target_: cyto_dl.image.transforms.RandomMultiScaleCropd
      keys:
        - ${source_col}
        - ${target_col}
      patch_shape: ${data._aux.patch_shape}
      patch_per_image: 1
      scales_dict: ${kv_to_dict:${data._aux._scales_dict}}
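A note on how these train/val dataloaders are fed: data: is deliberately left empty, presumably to be supplied at runtime as in-memory arrays rather than file paths. A small sketch of the expected structure, assuming ${source_col} and ${target_col} resolve to "raw" and "seg" as in the experiment config below:

import numpy as np

source_col, target_col = "raw", "seg"  # placeholder values matching the experiment config below

# One dict per training image, with numpy arrays (here C x Z x Y x X) as values.
train_data = [
    {
        source_col: np.random.rand(1, 32, 64, 64).astype(np.float32),
        target_col: (np.random.rand(1, 32, 64, 64) > 0.5).astype(np.float32),
    }
    for _ in range(4)
]

Each dict is then pushed through the dictionary-style (*d) MONAI transforms listed above, which is why every transform addresses its inputs by key.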
@@ -0,0 +1,40 @@
# @package _global_
# to execute this experiment run:
# python train.py experiment=example
defaults:
  - override /data: im2im/numpy_dataloader_train.yaml
  - override /model: im2im/segmentation.yaml
  - override /callbacks: default.yaml
  - override /trainer: gpu.yaml
  - override /logger: csv.yaml

# all parameters below will be merged with parameters from default configurations set above
# this allows you to overwrite only specified parameters

tags: ["dev"]
seed: 12345

experiment_name: YOUR_EXP_NAME
run_name: YOUR_RUN_NAME
source_col: raw
target_col: seg
spatial_dims: 3
raw_im_channels: 1

trainer:
  max_epochs: 100

data:
  _aux:
    # 2D
    # patch_shape: [64, 64]
    # 3D
    patch_shape: [16, 32, 32]

callbacks:
  saving:
    _target_: cyto_dl.callbacks.ImageSaver
    save_dir: ${paths.output_dir}
    save_every_n_epochs: ${model.save_images_every_n_epochs}
    stages: ["train", "test", "val"]
    save_input: True
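To sanity-check how this experiment file composes with the overridden numpy dataloader config, Hydra's compose API can be used. Everything named below is an assumption for illustration only: a top-level configs/ directory, a primary train config, and an experiment name im2im/numpy_example standing in for wherever this file actually lands.

from pathlib import Path

from hydra import compose, initialize_config_dir

# Assumed layout (not taken from this commit): configs/train.yaml as the primary config
# and this file saved as configs/experiment/im2im/numpy_example.yaml.
config_dir = str(Path("configs").absolute())

with initialize_config_dir(config_dir=config_dir, version_base=None):
    cfg = compose(config_name="train", overrides=["experiment=im2im/numpy_example"])

print(cfg.data._aux.patch_shape)       # expected: [16, 32, 32]
print(cfg.source_col, cfg.target_col)  # expected: raw seg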
@@ -0,0 +1,48 @@
from typing import Callable, Dict, List, Sequence, Union

import numpy as np
from monai.data import DataLoader, Dataset
from monai.transforms import Compose
from omegaconf import ListConfig, OmegaConf


def make_array_dataloader(
    data: Union[np.ndarray, List[np.ndarray], List[Dict[str, np.ndarray]]],
    transforms: Union[Sequence[Callable], Callable],
    source_key: str = "input",
    **dataloader_kwargs,
):
    """Create a dataloader from an array dataset.

    Parameters
    ----------
    data: Union[np.ndarray, List[np.ndarray], List[Dict[str, np.ndarray]]]
        If a numpy array (prediction only), the dataloader will be created with a single
        source_key. If a list, each element must be a numpy array (for prediction) or a
        dictionary of numpy array values (for training).
    transforms: Union[Sequence[Callable], Callable]
        Transforms to apply to each sample.
    dataloader_kwargs:
        Additional keyword arguments passed to torch.utils.data.DataLoader when it is
        instantiated, e.g. `num_workers`, `batch_size`, `shuffle`. See the PyTorch docs:
        https://pytorch.org/docs/stable/data.html#torch.utils.data.DataLoader
    """
    # A sequence of transforms (including a Hydra ListConfig) is wrapped in a single Compose.
    if isinstance(transforms, (list, tuple, ListConfig)):
        transforms = Compose(transforms)

    # Hydra/OmegaConf containers are converted to plain Python objects; raw arrays and
    # lists pass through unchanged (OmegaConf.to_object rejects non-config inputs).
    if OmegaConf.is_config(data):
        data = OmegaConf.to_object(data)

    if isinstance(data, (list, tuple, ListConfig)):
        # Bare arrays become single-key dicts under source_key; dicts are kept as-is.
        data = [{source_key: d} if isinstance(d, np.ndarray) else d for d in data]
    elif isinstance(data, np.ndarray):
        data = [{source_key: data}]
    else:
        raise ValueError(
            f"Invalid data type: {type(data)}. Data must be a numpy array or list of numpy arrays."
        )

    dataset = Dataset(data, transform=transforms)

    return DataLoader(dataset, **dataloader_kwargs)
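A short usage sketch for the function above, covering both input shapes it accepts; the "raw"/"seg" keys are placeholders for whatever the configs interpolate:

import numpy as np
from monai.transforms import ToTensord

from cyto_dl.datamodules.array import make_array_dataloader

# Prediction: a bare array is wrapped as {"input": array} via the default source_key.
predict_loader = make_array_dataloader(
    data=np.random.rand(1, 8, 16, 16).astype(np.float32),
    transforms=ToTensord(keys=["input"]),
    batch_size=1,
)

# Training: a list of per-sample dicts keeps source and target arrays together.
train_loader = make_array_dataloader(
    data=[
        {
            "raw": np.random.rand(1, 8, 16, 16).astype(np.float32),
            "seg": np.zeros((1, 8, 16, 16), dtype=np.float32),
        }
        for _ in range(2)
    ],
    transforms=ToTensord(keys=["raw", "seg"]),
    source_key="raw",
    batch_size=2,
    shuffle=True,
)

print(next(iter(predict_loader))["input"].shape)  # torch.Size([1, 1, 8, 16, 16])
print(next(iter(train_loader))["raw"].shape)      # torch.Size([2, 1, 8, 16, 16])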