Replace pybop.MAP with pybop.LogPosterior #483

Merged · 17 commits · Sep 9, 2024
39 changes: 5 additions & 34 deletions examples/notebooks/comparing_cost_functions.ipynb
@@ -15,36 +15,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"outputs": [],
"source": [
"%pip install --upgrade pip ipywidgets -q\n",
"%pip install pybop -q\n",
@@ -206,7 +177,7 @@
{
"data": {
"text/plain": [
"2.0269652551213878e-10"
"1.1753460077019054e-09"
]
},
"execution_count": null,
@@ -240,7 +211,7 @@
{
"data": {
"text/plain": [
"2.0269652551213878e-10"
"1.1753460077019054e-09"
]
},
"execution_count": null,
@@ -269,13 +240,13 @@
"name": "stdout",
"output_type": "stream",
"text": [
"[8.19241247e-05 5.09035035e-06]\n"
"[7.60957550e-05 5.48691392e-06]\n"
]
},
{
"data": {
"text/plain": [
"0.030472774516224807"
"0.014466627355628724"
]
},
"execution_count": null,
21 changes: 0 additions & 21 deletions examples/notebooks/cost-compute-methods.ipynb
@@ -19,27 +19,6 @@
"id": "1",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
878 changes: 878 additions & 0 deletions examples/notebooks/maximum_a_posteriori.ipynb

Large diffs are not rendered by default.

31 changes: 1 addition & 30 deletions examples/notebooks/multi_model_identification.ipynb
@@ -25,36 +25,7 @@
"id": "X87NUGPW04py",
"outputId": "0d785b07-7cff-4aeb-e60a-4ff5a669afbf"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"/Users/engs2510/Documents/Git/PyBOP/.nox/notebooks-overwrite/bin/python3: No module named pip\r\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Note: you may need to restart the kernel to use updated packages.\n"
]
}
],
"outputs": [],
"source": [
"%pip install --upgrade pip ipywidgets pybamm -q\n",
"%pip install pybop -q"
4 changes: 2 additions & 2 deletions examples/scripts/spm_MAP.py
@@ -60,8 +60,8 @@

# Generate problem, cost function, and optimisation class
problem = pybop.FittingProblem(model, parameters, dataset)
cost = pybop.MAP(problem, pybop.GaussianLogLikelihoodKnownSigma, sigma0=sigma)
optim = pybop.AdamW(
cost = pybop.LogPosterior(pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=sigma))
optim = pybop.IRPropMin(
cost,
sigma0=0.05,
max_unchanged_iterations=20,
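The script change above is the migration pattern in miniature: pybop.MAP built its likelihood internally from a class and a sigma0, whereas pybop.LogPosterior wraps an already-constructed likelihood and, unless given a log_prior, derives its prior from the parameters' priors. A minimal sketch of the new call pattern, reusing the names from the example script (model, parameters, dataset, sigma) as an illustration rather than a canonical recipe:

    import pybop

    # Previously (removed in this PR), the likelihood was constructed inside MAP:
    #   cost = pybop.MAP(problem, pybop.GaussianLogLikelihoodKnownSigma, sigma0=sigma)

    # Now the likelihood is built explicitly and wrapped in LogPosterior; with no
    # log_prior argument, the prior defaults to a JointLogPrior over the
    # parameters' priors.
    problem = pybop.FittingProblem(model, parameters, dataset)
    likelihood = pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=sigma)
    cost = pybop.LogPosterior(likelihood)
    optim = pybop.IRPropMin(cost, sigma0=0.05, max_unchanged_iterations=20)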
1 change: 0 additions & 1 deletion pybop/__init__.py
@@ -111,7 +111,6 @@
GaussianLogLikelihood,
GaussianLogLikelihoodKnownSigma,
LogPosterior,
MAP,
)
from .costs._weighted_cost import WeightedCost

179 changes: 54 additions & 125 deletions pybop/costs/_likelihoods.py
@@ -1,10 +1,12 @@
from typing import Union
from typing import Optional, Union

import numpy as np
import scipy.stats as stats

import pybop
from pybop.costs.base_cost import BaseCost
from pybop.parameters.parameter import Parameter, Parameters
from pybop.parameters.priors import JointLogPrior, Uniform
from pybop.parameters.priors import BasePrior, JointLogPrior, Uniform
from pybop.problems.base_problem import BaseProblem


@@ -102,7 +104,7 @@ class GaussianLogLikelihood(BaseLikelihood):
def __init__(
self,
problem: BaseProblem,
sigma0: Union[float, list[float], list[Parameter]] = 0.002,
sigma0: Union[float, list[float], list[Parameter]] = 0.02,
dsigma_scale: float = 1.0,
):
super().__init__(problem)
@@ -202,113 +204,32 @@ def compute(
return e


class MAP(BaseLikelihood):
"""
Maximum a posteriori cost function.

Computes the maximum a posteriori cost function, which is the sum of the
log likelihood and the log prior. The goal of maximising is achieved by
setting minimising = False in the optimiser settings.

Inherits all parameters and attributes from ``BaseLikelihood``.

"""

def __init__(self, problem, likelihood, sigma0=None, gradient_step=1e-3):
super().__init__(problem)
self.sigma0 = sigma0
self.gradient_step = gradient_step
if self.sigma0 is None:
self.sigma0 = []
for param in self.problem.parameters:
self.sigma0.append(param.prior.sigma)

try:
self.likelihood = likelihood(problem=self.problem, sigma0=self.sigma0)
except Exception as e:
raise ValueError(
f"An error occurred when constructing the Likelihood class: {e}"
) from e

if hasattr(self, "likelihood") and not isinstance(
self.likelihood, BaseLikelihood
):
raise ValueError(f"{self.likelihood} must be a subclass of BaseLikelihood")

self.parameters = self.likelihood.parameters
self._has_separable_problem = self.likelihood.has_separable_problem

def compute(
self,
y: dict,
dy: np.ndarray = None,
calculate_grad: bool = False,
) -> Union[float, tuple[float, np.ndarray]]:
"""
Compute the Maximum a Posteriori for the given parameters.

If self._has_separable_problem is True, then this method only computes the
likelihood, without calling the problem.evaluate(). Else, problem.evaluate()
is called before computing the likelihood.

Returns
-------
float
The maximum a posteriori cost.
"""
# Verify we have dy if calculate_grad is True
self.verify_args(dy, calculate_grad)

log_prior = sum(param.prior.logpdf(param.value) for param in self.parameters)

if not np.isfinite(log_prior).any():
return (-np.inf, -self.grad_fail) if calculate_grad else -np.inf

if calculate_grad:
log_likelihood, dl = self.likelihood.compute(y, dy, calculate_grad=True)

# Compute a finite difference approximation of the gradient of the log prior
delta = self.parameters.initial_value() * self.gradient_step
prior_gradient = []

for parameter, step_size in zip(self.problem.parameters, delta):
param_value = parameter.value

log_prior_upper = parameter.prior.logpdf(param_value * (1 + step_size))
log_prior_lower = parameter.prior.logpdf(param_value * (1 - step_size))

gradient = (log_prior_upper - log_prior_lower) / (
2 * step_size * param_value + np.finfo(float).eps
)
prior_gradient.append(gradient)

posterior = log_likelihood + log_prior
total_gradient = dl + prior_gradient

return posterior, total_gradient

log_likelihood = self.likelihood.compute(y)
posterior = log_likelihood + log_prior
return posterior


class LogPosterior(BaseCost):
class LogPosterior(BaseLikelihood):
"""
The Log Posterior for a given problem.

Computes the log posterior which is the sum of the log
Computes the log posterior which is proportional to the sum of the log
likelihood and the log prior.

Inherits all parameters and attributes from ``BaseCost``.
Inherits all parameters and attributes from ``BaseLikelihood``.
"""

def __init__(self, log_likelihood, log_prior=None):
def __init__(
self,
log_likelihood: BaseLikelihood,
log_prior: Optional[Union[pybop.BasePrior, stats.rv_continuous]] = None,
gradient_step: float = 1e-3,
):
super().__init__(problem=log_likelihood.problem)
self.gradient_step = gradient_step

# Store the likelihood and prior
self._log_likelihood = log_likelihood
self.parameters = self._log_likelihood.parameters
self._has_separable_problem = self._log_likelihood.has_separable_problem

if log_prior is None:
self._prior = JointLogPrior(*log_likelihood.problem.parameters.priors())
self._prior = JointLogPrior(*self.parameters.priors())
else:
self._prior = log_prior

@@ -319,26 +240,48 @@ def compute(
calculate_grad: bool = False,
) -> Union[float, tuple[float, np.ndarray]]:
"""
Calculate the posterior cost for a given set of parameters.
Calculate the posterior cost for a given forward model prediction.

Parameters
----------
x : array-like
The parameters for which to evaluate the cost.
grad : array-like, optional
An array to store the gradient of the cost function with respect
to the parameters.
y : dict
The data for which to evaluate the cost.
dy : np.ndarray, optional
The corresponding sensitivities in the data.
calculate_grad : bool, optional
Whether to calculate the gradient of the cost function.

Returns
-------
float
The posterior cost.
Union[float, Tuple[float, np.ndarray]]
The posterior cost, and optionally the gradient.
"""
# Verify we have dy if calculate_grad is True
self.verify_args(dy, calculate_grad)

if calculate_grad:
log_prior, dp = self._prior.logpdfS1(self.parameters.current_value())
if hasattr(self._prior, "logpdfS1"):
log_prior, dp = self._prior.logpdfS1(self.parameters.current_value())
else:
# Compute log prior first
log_prior = self._prior.logpdf(self.parameters.current_value())

# Compute a finite difference approximation of the gradient of the log prior
delta = self.parameters.initial_value() * self.gradient_step
dp = []

for parameter, step_size in zip(self.parameters, delta):
param_value = parameter.value
upper_value = param_value * (1 + step_size)
lower_value = param_value * (1 - step_size)

log_prior_upper = parameter.prior.logpdf(upper_value)
log_prior_lower = parameter.prior.logpdf(lower_value)

gradient = (log_prior_upper - log_prior_lower) / (
2 * step_size * param_value + np.finfo(float).eps
)
dp.append(gradient)
else:
log_prior = self._prior.logpdf(self.parameters.current_value())

@@ -359,24 +302,10 @@ def compute(
posterior = log_likelihood + log_prior
return posterior

def prior(self):
"""
Return the prior object.

Returns
-------
object
The prior object.
"""
@property
def prior(self) -> BasePrior:
return self._prior

def likelihood(self):
"""
Returns the likelihood.

Returns
-------
object
The likelihood object.
"""
@property
def likelihood(self) -> BaseLikelihood:
return self._log_likelihood
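Two implementation details in the rewritten LogPosterior are worth spelling out. First, the quantity computed is log p(theta | D) = log p(D | theta) + log p(theta) - log p(D); the evidence term log p(D) is constant in theta, so maximising the returned sum of log likelihood and log prior maximises the posterior. Second, when the prior exposes an analytic logpdfS1 its gradient is used directly; otherwise compute falls back to a central finite difference with a relative step (gradient_step, default 1e-3). A self-contained sketch of that fallback — simplified in that it uses a plain relative step, where the PR additionally scales gradient_step by the parameters' initial values, and with a hypothetical stand-in prior:

    import numpy as np
    from scipy import stats

    def fd_log_prior_gradient(logpdf, values, gradient_step=1e-3):
        # Central-difference approximation of d/dtheta log p(theta) for each
        # value, mirroring the fallback branch in LogPosterior.compute.
        grads = []
        for value in values:
            upper = logpdf(value * (1 + gradient_step))
            lower = logpdf(value * (1 - gradient_step))
            # Denominator as in the diff: 2 * step * value, with machine
            # epsilon added to guard against division by zero at value == 0.
            grads.append(
                (upper - lower) / (2 * gradient_step * value + np.finfo(float).eps)
            )
        return np.asarray(grads)

    prior = stats.norm(loc=0.5, scale=0.1)  # hypothetical continuous prior
    print(fd_log_prior_gradient(prior.logpdf, np.array([0.45, 0.55])))

Note also that prior and likelihood are now read-only properties rather than methods, so call sites change from cost.prior() to cost.prior.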