Merge pull request #275 from pybop-team/248-add-maximum-a-posteriori
Add Maximum a Posteriori (MAP)
BradyPlanden authored Apr 11, 2024
2 parents 8cb625a + 1d6f893 commit 0509598
Showing 7 changed files with 194 additions and 6 deletions.
8 changes: 5 additions & 3 deletions .github/workflows/test_on_push.yaml
@@ -139,6 +139,8 @@ jobs:
        run: nox -s coverage

      - name: Upload coverage report
-        uses: codecov/codecov-action@v3
-        env:
-          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
+        uses: codecov/codecov-action@v4
+        with:
+          fail_ci_if_error: true
+          verbose: true
+          token: ${{ secrets.CODECOV_TOKEN }}
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -2,6 +2,7 @@

## Features

+- [#275](https://github.com/pybop-team/PyBOP/pull/275) - Adds Maximum a Posteriori (MAP) cost function with corresponding tests.
- [#273](https://github.com/pybop-team/PyBOP/pull/273) - Adds notebooks to nox examples session and updates CI workflows for change.
- [#250](https://github.com/pybop-team/PyBOP/pull/250) - Adds DFN, MPM, MSMR models and moves multiple construction variables to BaseEChem. Adds exception catch on simulate & simulateS1.
- [#241](https://github.com/pybop-team/PyBOP/pull/241) - Adds experimental circuit model fitting notebook with LG M50 data.
71 changes: 71 additions & 0 deletions examples/scripts/spm_MAP.py
@@ -0,0 +1,71 @@
import numpy as np

import pybop

# Define model
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
model = pybop.lithium_ion.SPM(parameter_set=parameter_set)

# Fitting parameters
parameters = [
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        prior=pybop.Gaussian(0.6, 0.05),
        bounds=[0.5, 0.8],
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        prior=pybop.Gaussian(0.48, 0.05),
        bounds=[0.4, 0.7],
    ),
]

# Set initial parameter values
parameter_set.update(
    {
        "Negative electrode active material volume fraction": 0.63,
        "Positive electrode active material volume fraction": 0.51,
    }
)
# Generate data
sigma = 0.005
t_eval = np.arange(0, 900, 2)
values = model.predict(t_eval=t_eval)
corrupt_values = values["Voltage [V]"].data + np.random.normal(0, sigma, len(t_eval))

# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": t_eval,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": corrupt_values,
    }
)

# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.MAP(problem, pybop.GaussianLogLikelihoodKnownSigma)
optim = pybop.Optimisation(cost, optimiser=pybop.CMAES)
optim.set_max_unchanged_iterations(20)
optim.set_min_iterations(20)
optim.set_max_iterations(100)

# Run the optimisation
x, final_cost = optim.run()
print("Estimated parameters:", x)

# Plot the timeseries output
pybop.quick_plot(problem, parameter_values=x[0:2], title="Optimised Comparison")

# Plot convergence
pybop.plot_convergence(optim)

# Plot the parameter traces
pybop.plot_parameters(optim)

# Plot the cost landscape
pybop.plot2d(cost, steps=15)

# Plot the cost landscape with optimisation path
bounds = np.array([[0.55, 0.77], [0.48, 0.68]])
pybop.plot2d(optim, bounds=bounds, steps=15)
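A quick way to sanity-check the script above is to evaluate the cost directly: PyBOP costs are callable, and MAP returns the log posterior, so larger values are better. A minimal sketch, not part of the committed script; the variable names come from the example above:

# Evaluate the MAP cost at the data-generating values and at a perturbed point.
# Because MAP returns the log posterior, the true values should typically score higher.
true_x = [0.63, 0.51]  # the values set via parameter_set.update() above
perturbed_x = [0.68, 0.56]

print("log posterior at true parameters:     ", cost(true_x))
print("log posterior at perturbed parameters:", cost(perturbed_x))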
1 change: 1 addition & 0 deletions pybop/__init__.py
@@ -42,6 +42,7 @@
    RootMeanSquaredError,
    SumSquaredError,
    ObserverCost,
+    MAP,
)
from .costs.design_costs import (
    DesignCost,
87 changes: 87 additions & 0 deletions pybop/costs/fitting_costs.py
@@ -1,5 +1,6 @@
import numpy as np

+from pybop.costs._likelihoods import BaseLikelihood
from pybop.costs.base_cost import BaseCost
from pybop.observers.observer import Observer

@@ -279,3 +280,89 @@ def evaluateS1(self, x):
        If an error occurs during the calculation of the cost or gradient.
        """
        raise NotImplementedError


class MAP(BaseLikelihood):
    """
    Maximum a posteriori cost function.

    Computes the maximum a posteriori cost, which is the sum of the
    log likelihood and the log prior.

    Inherits all parameters and attributes from ``BaseLikelihood``.
    """

    def __init__(self, problem, likelihood, sigma=None):
        super(MAP, self).__init__(problem)
        self.sigma0 = sigma
        if self.sigma0 is None:
            # Fall back to the standard deviation of each parameter's prior
            self.sigma0 = []
            for param in self.problem.parameters:
                self.sigma0.append(param.prior.sigma)

        try:
            self.likelihood = likelihood(problem=self.problem, sigma=self.sigma0)
        except Exception as e:
            raise ValueError(
                f"An error occurred when constructing the Likelihood class: {e}"
            )

        if hasattr(self, "likelihood") and not isinstance(
            self.likelihood, BaseLikelihood
        ):
            raise ValueError(f"{self.likelihood} must be a subclass of BaseLikelihood")

    def _evaluate(self, x, grad=None):
        """
        Calculate the maximum a posteriori cost for a given set of parameters.

        Parameters
        ----------
        x : array-like
            The parameters for which to evaluate the cost.
        grad : array-like, optional
            An array to store the gradient of the cost function with respect
            to the parameters.

        Returns
        -------
        float
            The maximum a posteriori cost.
        """
        log_likelihood = self.likelihood.evaluate(x)
        log_prior = sum(
            param.prior.logpdf(x_i) for x_i, param in zip(x, self.problem.parameters)
        )

        posterior = log_likelihood + log_prior
        return posterior

    def _evaluateS1(self, x):
        """
        Compute the maximum a posteriori cost and its gradient with respect
        to the parameters. The method passes the likelihood gradient to the
        optimiser without modification.

        Parameters
        ----------
        x : array-like
            The parameters for which to compute the cost and gradient.

        Returns
        -------
        tuple
            A tuple containing the cost and the gradient. The cost is a float,
            and the gradient is an array-like of the same length as `x`.

        Raises
        ------
        ValueError
            If an error occurs during the calculation of the cost or gradient.
        """
        log_likelihood, dl = self.likelihood.evaluateS1(x)
        log_prior = sum(
            param.prior.logpdf(x_i) for x_i, param in zip(x, self.problem.parameters)
        )

        posterior = log_likelihood + log_prior
        return posterior, dl
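For reference, the quantity this cost returns is the log posterior up to an additive constant. By Bayes' rule,

\log p(\theta \mid \mathcal{D}) = \log p(\mathcal{D} \mid \theta) + \log p(\theta) - \log p(\mathcal{D}),

and since \log p(\mathcal{D}) does not depend on \theta, maximising the sum of the log likelihood and the log prior, as ``_evaluate`` does, is equivalent to maximising the posterior itself. Note that ``_evaluateS1`` returns this posterior value but, as its docstring states, passes through only the likelihood gradient ``dl``; the prior's contribution to the gradient is not included.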
11 changes: 10 additions & 1 deletion tests/integration/test_parameterisations.py
@@ -44,6 +44,7 @@ def init_soc(self, request):
            pybop.GaussianLogLikelihoodKnownSigma,
            pybop.RootMeanSquaredError,
            pybop.SumSquaredError,
+            pybop.MAP,
        ]
    )
    def cost_class(self, request):
@@ -72,6 +73,10 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
        )
        if cost_class in [pybop.GaussianLogLikelihoodKnownSigma]:
            return cost_class(problem, sigma=[0.03, 0.03])
+        elif cost_class in [pybop.MAP]:
+            return cost_class(
+                problem, pybop.GaussianLogLikelihoodKnownSigma, sigma=[0.03, 0.03]
+            )
        else:
            return cost_class(problem)

@@ -125,7 +130,9 @@ def test_spm_optimisers(self, optimiser, spm_costs):
            assert parameterisation._max_iterations == 125

        elif optimiser in [pybop.GradientDescent]:
-            if isinstance(spm_costs, pybop.GaussianLogLikelihoodKnownSigma):
+            if isinstance(
+                spm_costs, (pybop.GaussianLogLikelihoodKnownSigma, pybop.MAP)
+            ):
                parameterisation.optimiser.set_learning_rate(1.8e-5)
                parameterisation.set_min_iterations(150)
            else:
@@ -177,6 +184,8 @@ def spm_two_signal_cost(self, parameters, model, cost_class):

        if cost_class in [pybop.GaussianLogLikelihoodKnownSigma]:
            return cost_class(problem, sigma=[0.05, 0.05])
+        elif cost_class in [pybop.MAP]:
+            return cost_class(problem, pybop.GaussianLogLikelihoodKnownSigma)
        else:
            return cost_class(problem)

21 changes: 19 additions & 2 deletions tests/unit/test_cost.py
@@ -73,12 +73,15 @@ def problem(self, model, parameters, dataset, signal, x0, request):
            pybop.RootMeanSquaredError,
            pybop.SumSquaredError,
            pybop.ObserverCost,
+            pybop.MAP,
        ]
    )
    def cost(self, problem, request):
        cls = request.param
        if cls in [pybop.SumSquaredError, pybop.RootMeanSquaredError]:
            return cls(problem)
+        elif cls in [pybop.MAP]:
+            return cls(problem, pybop.GaussianLogLikelihoodKnownSigma)
        elif cls in [pybop.ObserverCost]:
            inputs = {p.name: problem.x0[i] for i, p in enumerate(problem.parameters)}
            state = problem._model.reinit(inputs)
@@ -119,10 +122,24 @@ def test_design_base(self, problem):
        with pytest.raises(NotImplementedError):
            design_cost([0.5])

+    @pytest.mark.unit
+    def test_MAP(self, problem):
+        # Incorrect likelihood
+        with pytest.raises(ValueError):
+            pybop.MAP(problem, pybop.SumSquaredError)
+
+        # Incorrect construction of likelihood
+        with pytest.raises(ValueError):
+            pybop.MAP(problem, pybop.GaussianLogLikelihoodKnownSigma, sigma="string")
+
    @pytest.mark.unit
    def test_costs(self, cost):
-        higher_cost = cost([0.55])
-        lower_cost = cost([0.52])
+        if isinstance(cost, pybop.BaseLikelihood):
+            higher_cost = cost([0.52])
+            lower_cost = cost([0.55])
+        else:
+            higher_cost = cost([0.55])
+            lower_cost = cost([0.52])
        assert higher_cost > lower_cost or (
            higher_cost == lower_cost and higher_cost == np.inf
        )
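The branch added to test_costs reflects the sign convention across cost types: likelihood-based costs, including MAP, return a log probability and are maximised, while error-based costs are minimised, so the expected ordering of the two evaluation points flips. A minimal sketch of that convention, assuming the ``problem`` fixture above (illustrative only, not part of the test suite):

map_cost = pybop.MAP(problem, pybop.GaussianLogLikelihoodKnownSigma)
sse_cost = pybop.SumSquaredError(problem)

# 0.52 lies nearer the optimum than 0.55 for this fixture, so:
assert map_cost([0.52]) > map_cost([0.55])  # log posterior: larger is better
assert sse_cost([0.52]) < sse_cost([0.55])  # squared error: smaller is better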
