Merge pull request #89 from KrisThielemans/kris_eval
Improvements and completion of evaluation/generation scripts
KrisThielemans authored Aug 22, 2024
2 parents 0777291 + ced6a57 commit 513473a
Showing 19 changed files with 500 additions and 60 deletions.
1 change: 1 addition & 0 deletions SIRF_data_preparation/.gitignore
@@ -0,0 +1 @@
runs/
17 changes: 0 additions & 17 deletions SIRF_data_preparation/BSREM_Vision600_thorax.py

This file was deleted.

@@ -1,9 +1,16 @@
from petric import MetricsWithTimeout, get_data
from petric import SRCDIR, MetricsWithTimeout, get_data
from sirf.contrib.BSREM.BSREM import BSREM1
from sirf.contrib.partitioner import partitioner
from SIRF_data_preparation.dataset_settings import get_settings

data = get_data(srcdir="./data/NeuroLF_Hoffman_Dataset", outdir="./output/BSREM_NeuroLF_Hoffman")
num_subsets = 16
scanID = "NeuroLF_Hoffman_Dataset"
settings = get_settings(scanID)
slices = settings.slices
num_subsets = settings.num_subsets
outdir = f"./output/{scanID}/BSREM"
outdir1 = f"./output/{scanID}/BSREM_cont"

data = get_data(srcdir=SRCDIR / scanID, outdir=outdir)
data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term, data.mult_factors,
num_subsets, initial_image=data.OSEM_image)
# WARNING: modifies prior strength with 1/num_subsets (as currently needed for BSREM implementations)
@@ -13,5 +20,6 @@
    f.set_prior(data.prior)

algo = BSREM1(data_sub, obj_funs, initial=data.OSEM_image, initial_step_size=.3, relaxation_eta=.01,
update_objective_interval=10)
algo.run(5000, callbacks=[MetricsWithTimeout(transverse_slice=72)])
update_objective_interval=80)
# %%
algo.run(15000, callbacks=[MetricsWithTimeout(**slices, outdir=outdir, seconds=3600 * 100)])
97 changes: 97 additions & 0 deletions SIRF_data_preparation/NeuroLF_Hoffman_Dataset/NeuroLF_VOIs.py
@@ -0,0 +1,97 @@
# %%
import os
from pathlib import Path

import matplotlib.pyplot as plt
from scipy.ndimage import binary_erosion

import sirf.STIR as STIR
from petric import SRCDIR
from SIRF_data_preparation import data_QC

# %%
# %matplotlib ipympl
# %%
STIR.AcquisitionData.set_storage_scheme('memory')
# set up redirection of STIR messages to files
# _ = STIR.MessageRedirector('BSREM_info.txt', 'BSREM_warnings.txt', 'BSREM_errors.txt')
# fewer messages from STIR and SIRF (set to a higher value to diagnose problems)
STIR.set_verbosity(0)


# %%
def plot_image(image, vmin=0, vmax=None):
    if vmax is None:
        vmax = image.max()
    plt.figure()
    plt.subplot(131)
    plt.imshow(image.as_array()[im_slice, :, :], vmin=vmin, vmax=vmax)
    plt.subplot(132)
    plt.imshow(image.as_array()[:, cor_slice, :], vmin=vmin, vmax=vmax)
    plt.subplot(133)
    plt.imshow(image.as_array()[:, :, sag_slice], vmin=vmin, vmax=vmax)
    plt.show()


# %% read
if not SRCDIR.is_dir():
    PETRICDIR = Path('~/devel/PETRIC').expanduser()
    SRCDIR = PETRICDIR / 'data'
datadir = str(SRCDIR / 'NeuroLF_Hoffman_Dataset') + '/'

# %%
OSEM_image = STIR.ImageData(datadir + 'OSEM_image.hv')
im_slice = 72
cor_slice = OSEM_image.dimensions()[1] // 2
sag_slice = OSEM_image.dimensions()[2] // 2
cmax = OSEM_image.max()

# %%
reference_image = STIR.ImageData(datadir + 'reference_image.hv')
# %% read in original VOIs
whole_object_mask = STIR.ImageData(datadir + 'whole_phantom.hv')
# whole_object_mask = STIR.ImageData(datadir+'VOI_whole_object.hv')
# from README.md
# 1: ventricles, 2: white matter, 3: grey matter
allVOIs = STIR.ImageData(datadir + 'vois_ventricles_white_grey.hv')
# %% create PETRIC VOIs
background_mask = whole_object_mask.clone()
background_mask.fill(binary_erosion(allVOIs.as_array() == 2, iterations=2))
VOI1_mask = whole_object_mask.clone()
VOI1_mask.fill(allVOIs.as_array() == 1)
VOI2_mask = whole_object_mask.clone()
VOI2_mask.fill(binary_erosion(allVOIs.as_array() == 2, iterations=1))
VOI3_mask = whole_object_mask.clone()
VOI3_mask.fill(allVOIs.as_array() == 3)
# %% write PETRIC VOIs
whole_object_mask.write(datadir + 'PETRIC/VOI_whole_object.hv')
background_mask.write(datadir + 'PETRIC/VOI_background.hv')
VOI1_mask.write(datadir + 'PETRIC/VOI_ventricles.hv')
VOI2_mask.write(datadir + 'PETRIC/VOI_WM.hv')
VOI3_mask.write(datadir + 'PETRIC/VOI_GM.hv')
# %%
VOIs = (whole_object_mask, background_mask, VOI1_mask, VOI2_mask, VOI3_mask)
# %%
[plot_image(VOI) for VOI in VOIs]
# %%
[plot_image(VOI) for VOI in VOIs]


# %%
def VOI_mean(image, VOI):
    return float((image * VOI).sum() / VOI.sum())


# %%
[VOI_mean(OSEM_image, VOI) for VOI in VOIs]
# %%
[VOI_mean(reference_image, VOI) for VOI in VOIs]

# %%
os.chdir(datadir)
# %%
data_QC.main()
# %%
voidir = Path(datadir) / 'PETRIC'
[str(voi)[:-3] for voi in voidir.glob("VOI_*.hv")]
# %%
Empty file.
26 changes: 15 additions & 11 deletions SIRF_data_preparation/README.md
@@ -1,23 +1,27 @@
# Functions/classes for PETRIC

## Functions to create images that are provided to participants

- `create_initial_images.py`: functions+script to run OSEM and compute the "kappa" image from existing data
- `BSREM_common.py`: functions to run BSREM with various callbacks
- `BSREM_*.py`: functions with specific settings for a particular data-set

## Utility functions to prepare data for the Challenge

Participants should never have to use these (unless you want to create your own dataset).

- `data_utilities.py`: functions to use sirf.STIR to output prompts/mult_factors and additive_term
and handle Siemens data
- `create_initial_images.py`: functions+script to run OSEM and compute the "kappa" image from existing data
- `data_QC.py`: generates plots for QC
- Siemens mMR NEMA IQ data (on Zenodo)
- `download_Siemens_mMR_NEMA_IQ.py`: download and extract
- `prepare_mMR_NEMA_IQ_data.py`: prepare the data (prompts etc)
- `plot_BSREM_metrics.py`: plot objective functions/metrics after a BSREM run
- `run_BSREM.py` and `run_OSEM.py`: scripts to run these algorithms for a dataset (see the sketch below)
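
A condensed sketch of how these run scripts fit together, taken from the BSREM run script for the NeuroLF Hoffman dataset in this commit (the scanID is only an example; step sizes, relaxation parameters and iteration counts vary per dataset):

    from petric import SRCDIR, MetricsWithTimeout, get_data
    from sirf.contrib.BSREM.BSREM import BSREM1
    from sirf.contrib.partitioner import partitioner
    from SIRF_data_preparation.dataset_settings import get_settings

    scanID = "NeuroLF_Hoffman_Dataset"  # example dataset
    settings = get_settings(scanID)
    outdir = f"./output/{scanID}/BSREM"

    data = get_data(srcdir=SRCDIR / scanID, outdir=outdir)
    data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term,
                                                                data.mult_factors, settings.num_subsets,
                                                                initial_image=data.OSEM_image)
    # BSREM implementations currently need the prior strength divided by the number of subsets
    data.prior.set_penalisation_factor(data.prior.get_penalisation_factor() / len(obj_funs))
    data.prior.set_up(data.OSEM_image)
    for f in obj_funs:  # add the prior evenly to every subset objective function
        f.set_prior(data.prior)

    algo = BSREM1(data_sub, obj_funs, initial=data.OSEM_image, initial_step_size=.3, relaxation_eta=.01,
                  update_objective_interval=80)
    algo.run(15000, callbacks=[MetricsWithTimeout(**settings.slices, outdir=outdir, seconds=3600 * 100)])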

## Helpers

- `data_utilities.py`: functions to use sirf.STIR to output prompts/mult_factors and additive_term
and handle Siemens data
- `evaluation_utilities.py`: reading/plotting helpers for values of the objective function and metrics
- `PET_plot_functions.py`: plotting helpers
- `dataset_settings.py`: per-dataset settings, e.g. good slices for display and the number of subsets (see the sketch below)
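
A minimal sketch of how `dataset_settings.py` is consumed by the run scripts above (only the attributes used in this commit are shown):

    from SIRF_data_preparation.dataset_settings import get_settings

    settings = get_settings("NeuroLF_Hoffman_Dataset")
    num_subsets = settings.num_subsets  # subset count chosen for this dataset
    slices = settings.slices            # slice keyword arguments, e.g. passed as MetricsWithTimeout(**slices, ...)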

## Sub-folders per data-set

These contain files specific to the data-set, e.g. for downloading, VOI conversion, settings used for recon, etc.
Warning: this will be messy, and might be specific to whoever prepared this data-set. For instance,
for the Siemens mMR NEMA IQ data (on Zenodo):
- `download_Siemens_mMR_NEMA_IQ.py`: download and extract
- `prepare_mMR_NEMA_IQ_data.py`: prepare the data (prompts etc)
- `BSREM_*.py`: functions with specific settings for a particular data-set
@@ -0,0 +1,39 @@
import os

import sirf.STIR as STIR
from petric import SRCDIR, MetricsWithTimeout, get_data
from sirf.contrib.BSREM.BSREM import BSREM1
from sirf.contrib.partitioner import partitioner
from SIRF_data_preparation.dataset_settings import get_settings

scanID = 'Siemens_Vision600_thorax'
settings = get_settings(scanID)
slices = settings.slices
num_subsets = settings.num_subsets
outdir = f"./output/{scanID}/BSREM"
outdir1 = f"./output/{scanID}/BSREM_cont"

data = get_data(srcdir=SRCDIR / scanID, outdir=outdir)
data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term, data.mult_factors,
num_subsets, initial_image=data.OSEM_image)
# WARNING: modifies prior strength with 1/num_subsets (as currently needed for BSREM implementations)
data.prior.set_penalisation_factor(data.prior.get_penalisation_factor() / len(obj_funs))
data.prior.set_up(data.OSEM_image)
for f in obj_funs: # add prior evenly to every objective function
    f.set_prior(data.prior)

OSEM_image = data.OSEM_image
# clean-up some data, now that we have the subsets
del data
try:
    image1000 = STIR.ImageData(os.path.join(outdir, 'iter_1000.hv'))
except Exception:
    algo = BSREM1(data_sub, obj_funs, initial=OSEM_image, initial_step_size=.3, relaxation_eta=.01,
                  update_objective_interval=5)
    algo.run(1005, callbacks=[MetricsWithTimeout(seconds=3600 * 34, outdir=outdir)])
    # (assumes iter_1000.hv has now been written to outdir by the callbacks)
    image1000 = STIR.ImageData(os.path.join(outdir, 'iter_1000.hv'))

# restart

algo = BSREM1(data_sub, obj_funs, initial=image1000, initial_step_size=.3, relaxation_eta=.01,
              update_objective_interval=5)
algo.run(9000, callbacks=[MetricsWithTimeout(seconds=3600 * 34, outdir=outdir1)])
@@ -0,0 +1,21 @@
#! /bin/bash
# remove the first-pixel offset from the converted headers, as offsets are currently not in the reconstructed images
for f in *nii.gz; do
base=${f%%.nii.gz}
stir_math $base.hv $base.nii.gz
sed -i -e '/first pixel offset/ d' \
-e '/!INTERFILE/a !imaging modality := PT' \
$base.hv
# patient position is currently not in reconstructed images
# -e '/type of data/a patient orientation := feet_in' \
# -e '/type of data/a patient rotation := supine' \
invert_axis x ${base}_flip_x.hv ${base}.hv
invert_axis z ${base}.hv ${base}_flip_x.hv
rm ${base}_flip_x.*v
done

stir_math ../PETRIC/VOI_whole_object.hv phantom.hv
stir_math ../PETRIC/VOI_background.hv background.hv
for r in 1 4 7; do
stir_math ../PETRIC/VOI_${r}.hv VOI${r}_ds.hv
done
Empty file.
Empty file.
@@ -1,17 +1,25 @@
from petric import MetricsWithTimeout, get_data
from petric import SRCDIR, MetricsWithTimeout, get_data
from sirf.contrib.BSREM.BSREM import BSREM1
from sirf.contrib.partitioner import partitioner
from SIRF_data_preparation.dataset_settings import get_settings

data = get_data(srcdir="./data/Siemens_mMR_NEMA_IQ", outdir="./output/BSREM_mMR_NEMA_IQ")
num_subsets = 7
scanID = "Siemens_mMR_NEMA_IQ"
settings = get_settings(scanID)
slices = settings.slices
num_subsets = settings.num_subsets
outdir = f"./output/{scanID}/BSREM"

data = get_data(srcdir=SRCDIR / scanID, outdir=outdir)
data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term, data.mult_factors,
num_subsets, initial_image=data.OSEM_image)
num_subsets, mode="staggered",
initial_image=data.OSEM_image)
# WARNING: modifies prior strength with 1/num_subsets (as currently needed for BSREM implementations)
data.prior.set_penalisation_factor(data.prior.get_penalisation_factor() / len(obj_funs))
data.prior.set_up(data.OSEM_image)
for f in obj_funs: # add prior evenly to every objective function
    f.set_prior(data.prior)

algo = BSREM1(data_sub, obj_funs, initial=data.OSEM_image, initial_step_size=.3, relaxation_eta=.01,
update_objective_interval=10)
algo.run(5000, callbacks=[MetricsWithTimeout(transverse_slice=72, coronal_slice=109)])
algo = BSREM1(data_sub, obj_funs, initial=data.OSEM_image, initial_step_size=.3, relaxation_eta=.005,
update_objective_interval=80)
# %%
algo.run(15000, callbacks=[MetricsWithTimeout(**slices, outdir=outdir, seconds=3600 * 100)])