Merge pull request #319 from pybop-team/307-add-cuckoo-optimiser

Adds cuckoo optimiser

BradyPlanden authored Jul 5, 2024
2 parents 7a85dd0 + 86a297a commit 59e2ce1
Showing 9 changed files with 661 additions and 142 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

## Features

- [#319](https://github.com/pybop-team/PyBOP/pull/319/) - Adds `CuckooSearch` optimiser with corresponding tests.
- [#379](https://github.com/pybop-team/PyBOP/pull/379) - Adds model.simulateS1 to weekly benchmarks.
- [#174](https://github.com/pybop-team/PyBOP/issues/174) - Adds new logo and updates Readme for accessibility.
- [#316](https://github.com/pybop-team/PyBOP/pull/316) - Adds Adam with weight decay (AdamW) optimiser, adds deprecation warning for pints.Adam implementation.
340 changes: 267 additions & 73 deletions examples/notebooks/multi_optimiser_identification.ipynb

Large diffs are not rendered by default.

218 changes: 153 additions & 65 deletions examples/notebooks/optimiser_calibration.ipynb

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions pybop/__init__.py
@@ -103,6 +103,7 @@
# Optimiser class
#

from .optimisers._cuckoo import CuckooSearchImpl
from .optimisers._adamw import AdamWImpl
from .optimisers.base_optimiser import BaseOptimiser, Result
from .optimisers.base_pints_optimiser import BasePintsOptimiser
@@ -120,6 +121,7 @@
PSO,
SNES,
XNES,
CuckooSearch,
AdamW,
)
from .optimisers.optimisation import Optimisation
2 changes: 1 addition & 1 deletion pybop/costs/_likelihoods.py
@@ -264,7 +264,7 @@ class MAP(BaseLikelihood):
"""

def __init__(self, problem, likelihood, sigma0=None, gradient_step=1e-2):
def __init__(self, problem, likelihood, sigma0=None, gradient_step=1e-3):
super(MAP, self).__init__(problem)
self.sigma0 = sigma0
self.gradient_step = gradient_step
197 changes: 197 additions & 0 deletions pybop/optimisers/_cuckoo.py
@@ -0,0 +1,197 @@
import numpy as np
from pints import PopulationBasedOptimiser
from scipy.special import gamma


class CuckooSearchImpl(PopulationBasedOptimiser):
"""
Cuckoo Search (CS) optimisation algorithm, inspired by the brood parasitism
of some cuckoo species. This algorithm was introduced by Yang and Deb in 2009.
The algorithm uses a population of host nests (solutions), where each cuckoo
(new solution) tries to replace a worse nest in the population. The quality
or fitness of the nests is determined by the cost function. A fraction
of the worst nests is abandoned at each generation, and new ones are built
randomly.
The pseudo-code for the Cuckoo Search is as follows:
1. Initialise population of n host nests
2. While (t < max_generations):
a. Get a cuckoo randomly by Lévy flights
b. Evaluate its quality/fitness F
c. Choose a nest among n (say, j) randomly
d. If (F > fitness of j):
i. Replace j with the new solution
e. Abandon a fraction (pa) of the worst nests and build new ones
f. Keep the best solutions/nests
g. Rank the solutions and find the current best
3. End While
This implementation also uses a decreasing step size for the Lévy flights, calculated
as sigma = sigma0 / sqrt(iterations), where sigma0 is the initial step size and
iterations is the current iteration number.
Parameters:
- pa: Probability of discovering alien eggs/solutions (abandoning rate)
References:
- X. -S. Yang and Suash Deb, "Cuckoo Search via Lévy flights,"
2009 World Congress on Nature & Biologically Inspired Computing (NaBIC),
Coimbatore, India, 2009, pp. 210-214, https://doi.org/10.1109/NABIC.2009.5393690.
- S. Walton, O. Hassan, K. Morgan, M.R. Brown,
Modified cuckoo search: A new gradient free optimisation algorithm,
Chaos, Solitons & Fractals, Volume 44, Issue 9, 2011,
Pages 710-718, ISSN 0960-0779,
https://doi.org/10.1016/j.chaos.2011.06.004.
"""

def __init__(self, x0, sigma0=0.01, boundaries=None, pa=0.25):
super().__init__(x0, sigma0, boundaries=boundaries)

# Problem dimensionality
self._dim = len(x0)

# Population size and abandon rate
self._n = self._population_size
self._pa = pa
self.step_size = self._sigma0
self.beta = 1.5

# Set states
self._running = False
self._ready_for_tell = False

# Initialise nests
if self._boundaries:
self._nests = np.random.uniform(
low=self._boundaries.lower(),
high=self._boundaries.upper(),
size=(self._n, self._dim),
)
else:
self._nests = np.random.normal(
self._x0, self._sigma0, size=(self._n, self._dim)
)

self._fitness = np.full(self._n, np.inf)

# Initialise best solutions
self._x_best = np.copy(x0)
self._f_best = np.inf

# Set iteration count
self._iterations = 0

def ask(self):
"""
Returns a list of next points in the parameter-space
to evaluate from the optimiser.
"""
# Set flag to indicate that the optimiser is ready to receive replies
self._ready_for_tell = True
self._running = True

# Generate new solutions (cuckoos) by Lévy flights
self.step_size = self._sigma0 / max(1, np.sqrt(self._iterations))
step = self.levy_flight(self.beta, self._dim) * self.step_size
self.cuckoos = self._nests + step
return self.clip_nests(self.cuckoos)

def tell(self, replies):
"""
Receives a list of function values from the cost function from points
previously specified by `self.ask()`, and updates the optimiser state
accordingly.
"""
# Update iteration count
self._iterations += 1

# Compare cuckoos with current nests
for i in range(self._n):
f_new = replies[i]
if f_new < self._fitness[i]:
self._nests[i] = self.cuckoos[i]
self._fitness[i] = f_new
if f_new < self._f_best:
self._f_best = f_new
self._x_best = self.cuckoos[i]

# Abandon some worse nests
n_abandon = int(self._pa * self._n)
worst_nests = np.argsort(self._fitness)[-n_abandon:]
for idx in worst_nests:
self.abandon_nests(idx)
self._fitness[idx] = np.inf # reset fitness

def levy_flight(self, alpha, size):
"""
        Generate step sizes via Mantegna's algorithm for Lévy flights.
"""
from numpy import pi, power, random, sin

sigma_u = power(
(gamma(1 + alpha) * sin(pi * alpha / 2))
/ (gamma((1 + alpha) / 2) * alpha * power(2, (alpha - 1) / 2)),
1 / alpha,
)
sigma_v = 1

u = random.normal(0, sigma_u, size=size)
v = random.normal(0, sigma_v, size=size)
step = u / power(abs(v), 1 / alpha)

return step

def abandon_nests(self, idx):
"""
Updates the nests to abandon the worst performers and reinitialise.
"""
if self._boundaries:
self._nests[idx] = np.random.uniform(
low=self._boundaries.lower(),
high=self._boundaries.upper(),
)
else:
self._nests[idx] = np.random.normal(self._x0, self._sigma0)

def clip_nests(self, x):
"""
Clip the input array to the boundaries if available.
"""
if self._boundaries:
x = np.clip(x, self._boundaries.lower(), self._boundaries.upper())
return x

def _suggested_population_size(self):
"""
Inherited from Pints:PopulationBasedOptimiser.
Returns a suggested population size, based on the
dimension of the parameter space.
"""
return 4 + int(3 * np.log(self._n_parameters))

def running(self):
"""
Returns ``True`` if the optimisation is in progress.
"""
return self._running

def x_best(self):
"""
Returns the best parameter values found so far.
"""
return self._x_best

def f_best(self):
"""
Returns the best score found so far.
"""
return self._f_best

def name(self):
"""
Returns the name of the optimiser.
"""
return "Cuckoo Search"
30 changes: 29 additions & 1 deletion pybop/optimisers/pints_optimisers.py
@@ -9,7 +9,7 @@
from pints import IRPropMin as PintsIRPropMin
from pints import NelderMead as PintsNelderMead

from pybop import AdamWImpl, BasePintsOptimiser
from pybop import AdamWImpl, BasePintsOptimiser, CuckooSearchImpl


class GradientDescent(BasePintsOptimiser):
@@ -275,3 +275,31 @@ def __init__(self, cost, **optimiser_kwargs):
+ "Please choose another optimiser."
)
super().__init__(cost, PintsCMAES, **optimiser_kwargs)


class CuckooSearch(BasePintsOptimiser):
"""
Adapter for the Cuckoo Search optimiser in PyBOP.
Cuckoo Search is a population-based optimisation algorithm inspired by the brood parasitism of some cuckoo species.
It is designed to be simple, efficient, and robust, and is suitable for global optimisation problems.
Parameters
----------
**optimiser_kwargs : optional
Valid PyBOP option keys and their values, for example:
x0 : array_like
Initial parameter values.
sigma0 : float
Initial step size.
bounds : dict
A dictionary with 'lower' and 'upper' keys containing arrays for lower and
upper bounds on the parameters.
See Also
--------
pybop.CuckooSearch : PyBOP implementation of Cuckoo Search algorithm.
"""

def __init__(self, cost, **optimiser_kwargs):
super().__init__(cost, CuckooSearchImpl, **optimiser_kwargs)
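
For context, a minimal usage sketch of the new adapter. It assumes a pybop cost object has already been constructed (for example from a FittingProblem, as in the repository's example notebooks); the keyword values and the (x, final_cost) return shape of run() are assumptions based on the surrounding PyBOP API rather than something introduced here:

    import pybop

    # `cost` is assumed to be an existing pybop cost, e.g. built from a FittingProblem
    optim = pybop.CuckooSearch(
        cost,
        sigma0=0.05,          # initial Lévy-flight step size
        max_iterations=100,   # standard BasePintsOptimiser stopping criterion
    )
    x, final_cost = optim.run()  # assumed to return the best parameters and their cost
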
3 changes: 2 additions & 1 deletion tests/integration/test_spm_parameterisations.py
@@ -87,6 +87,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
pybop.SciPyDifferentialEvolution,
pybop.AdamW,
pybop.CMAES,
pybop.CuckooSearch,
pybop.IRPropMin,
pybop.NelderMead,
pybop.SNES,
@@ -108,7 +109,7 @@ def test_spm_optimisers(self, optimiser, spm_costs):
)

# Set sigma0 and create optimiser
sigma0 = 0.01 if isinstance(spm_costs, pybop.GaussianLogLikelihood) else 0.05
sigma0 = 0.006 if isinstance(spm_costs, pybop.MAP) else None
optim = optimiser(sigma0=sigma0, **common_args)

# Set max unchanged iterations for BasePintsOptimisers
10 changes: 9 additions & 1 deletion tests/unit/test_optimisation.py
@@ -75,6 +75,7 @@ def two_param_cost(self, model, two_parameters, dataset):
(pybop.Adam, "Adam"),
(pybop.AdamW, "AdamW"),
(pybop.CMAES, "Covariance Matrix Adaptation Evolution Strategy (CMA-ES)"),
(pybop.CuckooSearch, "Cuckoo Search"),
(pybop.SNES, "Seperable Natural Evolution Strategy (SNES)"),
(pybop.XNES, "Exponential Natural Evolution Strategy (xNES)"),
(pybop.PSO, "Particle Swarm Optimisation (PSO)"),
@@ -126,6 +127,7 @@ def test_no_optimisation_parameters(self, model, dataset):
pybop.PSO,
pybop.IRPropMin,
pybop.NelderMead,
pybop.CuckooSearch,
],
)
@pytest.mark.unit
@@ -175,6 +177,7 @@ def test_optimiser_kwargs(self, cost, optimiser):
):
warnings.simplefilter("always")
optim = optimiser(cost=cost, unrecognised=10)
assert not optim.pints_optimiser.running()
else:
# Check bounds in list format and update tol
bounds = [
@@ -249,7 +252,6 @@ def test_optimiser_kwargs(self, cost, optimiser):

# Check defaults
assert optim.pints_optimiser.n_hyper_parameters() == 5
assert not optim.pints_optimiser.running()
assert optim.pints_optimiser.x_guessed() == optim.pints_optimiser._x0
with pytest.raises(Exception):
optim.pints_optimiser.tell([0.1])
@@ -263,6 +265,12 @@
assert optim.x0 == x0_new
assert optim.x0 != x0

@pytest.mark.unit
def test_cuckoo_no_bounds(self, dataset, cost, model):
optim = pybop.CuckooSearch(cost=cost, bounds=None, max_iterations=1)
optim.run()
assert optim.pints_optimiser._boundaries is None

@pytest.mark.unit
def test_scipy_minimize_with_jac(self, cost):
# Check a method that uses gradient information