Merge pull request #36 from SyneRBI/precond
ISTA preconditioner
casperdcl authored Jul 5, 2024
2 parents 5b1431f + 970bb18 commit 4301b7c
Showing 2 changed files with 24 additions and 8 deletions.
28 changes: 22 additions & 6 deletions main_ISTA.py
@@ -7,7 +7,7 @@
"""
from cil.optimisation.algorithms import ISTA, Algorithm
from cil.optimisation.functions import IndicatorBox, SGFunction
-from cil.optimisation.utilities import ConstantStepSize, Sampler, callbacks
+from cil.optimisation.utilities import ConstantStepSize, Preconditioner, Sampler, callbacks
from petric import Dataset
from sirf.contrib.partitioner import partitioner

@@ -28,14 +28,29 @@ def __call__(self, algorithm: Algorithm):
            raise StopIteration


class MyPreconditioner(Preconditioner):
    """
    Example based on the row-sum of the Hessian of the log-likelihood. See: Tsai et al. Fast Quasi-Newton Algorithms
    for Penalized Reconstruction in Emission Tomography and Further Improvements via Preconditioning,
    IEEE TMI https://doi.org/10.1109/tmi.2017.2786865
    """
    def __init__(self, kappa):
        # add an epsilon to avoid division by zero (probably should make epsilon dependent on kappa)
        self.kappasq = kappa*kappa + 1e-6

    def apply(self, algorithm, gradient, out=None):
        return gradient.divide(self.kappasq, out=out)


class Submission(ISTA):
    """Stochastic subset version of preconditioned ISTA"""

    # note that `issubclass(ISTA, Algorithm) == True`
    def __init__(self, data: Dataset, num_subsets: int = 7, step_size: float = 1e-6,
                 update_objective_interval: int = 10):
        """
        Initialisation function, setting up data & (hyper)parameters.
        NB: in practice, `num_subsets` should likely be determined from the data.
        WARNING: we also currently ignore the non-negativity constraint here.
        This is just an example. Try to modify and improve it!
        """
        data_sub, acq_models, obj_funs = partitioner.data_partition(data.acquired_data, data.additive_term,
@@ -45,11 +45,12 @@ def __init__(self, data: Dataset, num_subsets: int = 7, step_size: float = 1e-6,
            f.set_prior(data.prior)

        sampler = Sampler.random_without_replacement(len(obj_funs))
-        F = -SGFunction(obj_funs, sampler=sampler) # negative to turn minimiser into maximiser
-        step_size_rule = ConstantStepSize(step_size) # ISTA default step_size is 0.99*2.0/F.L
-        g = IndicatorBox(lower=1e-6, accelerated=False) # "non-negativity" constraint
+        f = -SGFunction(obj_funs, sampler=sampler) # negative to turn minimiser into maximiser
+        step_size_rule = ConstantStepSize(step_size) # ISTA default step_size is 0.99*2.0/F.L
+        g = IndicatorBox(lower=0, accelerated=False) # non-negativity constraint

-        super().__init__(initial=data.OSEM_image, f=F, g=g, step_size=step_size_rule,
+        preconditioner = MyPreconditioner(data.kappa)
+        super().__init__(initial=data.OSEM_image, f=f, g=g, step_size=step_size_rule, preconditioner=preconditioner,
                         update_objective_interval=update_objective_interval)


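For context on what the new MyPreconditioner does inside ISTA: each iteration becomes a diagonally scaled gradient step followed by the proximal map of g (here projection onto non-negative values), with the diagonal taken as 1/(kappa^2 + eps) in the spirit of the Tsai et al. row-sum preconditioner cited in the docstring. The NumPy sketch below is purely illustrative, not the CIL/SIRF implementation; the toy quadratic objective and the step/eps values are assumptions for the example.

import numpy as np

def preconditioned_ista_step(x, grad_f, kappa, step=1e-6, eps=1e-6):
    """One update x <- prox_g(x - step * D * grad_f(x)), with
    D = 1 / (kappa**2 + eps) as in MyPreconditioner and
    prox_g = projection onto x >= 0 (the IndicatorBox(lower=0))."""
    d = 1.0 / (kappa**2 + eps)         # diagonal preconditioner (row-sum-of-Hessian style)
    x_new = x - step * d * grad_f(x)   # preconditioned gradient step
    return np.maximum(x_new, 0.0)      # prox of the non-negativity indicator

# Toy check on f(x) = 0.5*||x - b||**2, whose gradient is x - b.
b = np.array([1.0, 2.0, 3.0])
kappa = np.array([0.5, 1.0, 2.0])      # stand-in for data.kappa
x = np.zeros(3)
for _ in range(200):
    x = preconditioned_ista_step(x, lambda x: x - b, kappa, step=0.1)
print(x)  # close to b

In Submission, f is the negated (prior-augmented) Poisson log-likelihood, so minimising it with this preconditioned step maximises the penalised likelihood.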
4 changes: 2 additions & 2 deletions petric.py
@@ -144,7 +144,7 @@ def construct_RDP(penalty_strength, initial_image, kappa, max_scaling=1e-3):
    return prior


-Dataset = namedtuple('Dataset', ['acquired_data', 'additive_term', 'mult_factors', 'OSEM_image', 'prior'])
+Dataset = namedtuple('Dataset', ['acquired_data', 'additive_term', 'mult_factors', 'OSEM_image', 'prior', 'kappa'])


def get_data(srcdir=".", outdir=OUTDIR, sirf_verbosity=0):
@@ -165,7 +165,7 @@ def get_data(srcdir=".", outdir=OUTDIR, sirf_verbosity=0):
    penalty_strength = 1 / 700 # default choice
    prior = construct_RDP(penalty_strength, OSEM_image, kappa)

-    return Dataset(acquired_data, additive_term, mult_factors, OSEM_image, prior)
+    return Dataset(acquired_data, additive_term, mult_factors, OSEM_image, prior, kappa)


if SRCDIR.is_dir():
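Taken together, the two changes mean get_data now exposes kappa through the Dataset namedtuple and Submission forwards it to MyPreconditioner. The driver sketch below shows one way to exercise this end to end; it is not part of the commit, and the data directory, iteration count and ProgressCallback choice are assumptions (it needs a PETRIC-style dataset on disk and a CIL release recent enough to ship the callbacks module).

# Hypothetical driver script, not part of this commit.
from cil.optimisation.utilities import callbacks

from main_ISTA import Submission
from petric import get_data

data = get_data(srcdir=".")    # Dataset(..., prior, kappa); assumes the data files sit in the current directory
algo = Submission(data, num_subsets=7, step_size=1e-6, update_objective_interval=10)
algo.run(100, callbacks=[callbacks.ProgressCallback()])  # 100 preconditioned stochastic ISTA iterations
print(algo.objective)          # objective values recorded every update_objective_interval iterations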
