Adds Annealed Importance Sampling #550

Draft · wants to merge 6 commits into base: develop
Changes from 2 commits
72 changes: 72 additions & 0 deletions examples/scripts/importance_sampling.py
@@ -0,0 +1,72 @@
import numpy as np
import pybamm

import pybop

# Parameter set and model definition
solver = pybamm.IDAKLUSolver()
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
parameter_set.update(
{
"Negative electrode active material volume fraction": 0.66,
"Positive electrode active material volume fraction": 0.68,
}
)
synth_model = pybop.lithium_ion.DFN(parameter_set=parameter_set, solver=solver)

# Fitting parameters
parameters = pybop.Parameters(
pybop.Parameter(
"Negative electrode active material volume fraction",
prior=pybop.Gaussian(0.65, 0.05),
),
pybop.Parameter(
"Positive electrode active material volume fraction",
prior=pybop.Gaussian(0.65, 0.05),
),
)

# Generate data
init_soc = 0.5
sigma = 0.002


def noise(sigma, length):
return np.random.normal(0, sigma, length)


experiment = pybop.Experiment(
[
(
"Discharge at 0.5C for 1 minutes (5 second period)",
"Charge at 0.5C for 1 minutes (5 second period)",
"Discharge at 3C for 20 seconds (1 second period)",
),
]
)
values = synth_model.predict(
initial_state={"Initial SoC": init_soc}, experiment=experiment
)

# Form dataset
dataset = pybop.Dataset(
{
"Time [s]": values["Time [s]"].data,
"Current function [A]": values["Current [A]"].data,
"Voltage [V]": values["Voltage [V]"].data + noise(sigma),
}
)

# Generate problem, likelihood, and sampler
model = pybop.lithium_ion.DFN(parameter_set=parameter_set, solver=pybamm.IDAKLUSolver())
model.build(initial_state={"Initial SoC": init_soc})
problem = pybop.FittingProblem(model, parameters, dataset)
likelihood = pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=sigma)
prior = pybop.JointLogPrior(*parameters.priors())

sampler = pybop.AnnealedImportanceSampler(
likelihood, prior, iterations=10, num_beta=300, cov0=np.eye(2) * 1e-2
)
mean, median, std, var = sampler.run()

print(f"mean: {mean}, std: {std}, median: {median}, var: {var}")
1 change: 1 addition & 0 deletions pybop/__init__.py
@@ -161,6 +161,7 @@
SliceRankShrinkingMCMC, SliceStepoutMCMC,
)
from .samplers.mcmc_sampler import MCMCSampler
from .samplers.annealed_importance import AnnealedImportanceSampler

#
# Observer classes
35 changes: 35 additions & 0 deletions pybop/parameters/priors.py
@@ -430,6 +430,41 @@

return output, doutput

def rvs(self, size=1, random_state=None):
"""
Generates random variates from the joint distribution.

Parameters
----------
size : int
The number of random variates to generate.
random_state : int, optional
The random state seed for reproducibility. Default is None.

Returns
-------
array_like
An array of random variates from the distribution.

Raises
------
ValueError
If size is not a positive integer or a tuple of positive integers.
"""
if not isinstance(size, (int, tuple)):
raise ValueError(
"size must be a positive integer or tuple of positive integers"
)
if isinstance(size, int) and size < 1:
raise ValueError("size must be a positive integer")
if isinstance(size, tuple) and any(s < 1 for s in size):
raise ValueError("size must be a tuple of positive integers")


samples = []
for prior in self._priors:
samples.append(prior.rvs(size=size, random_state=random_state)[0])
return samples


def __repr__(self) -> str:
priors_repr = ", ".join([repr(prior) for prior in self._priors])
return f"{self.__class__.__name__}(priors: [{priors_repr}])"
134 changes: 134 additions & 0 deletions pybop/samplers/annealed_importance.py
@@ -0,0 +1,134 @@
from typing import Optional

import numpy as np

from pybop import BaseLikelihood, BasePrior


class AnnealedImportanceSampler:
"""
This class implements annealed importance sampling of the posterior
distribution to compute the model evidence, as introduced in [1].

[1] "Annealed Importance Sampling", Radford M. Neal, 1998, Technical Report
No. 9805.
"""

def __init__(
self,
log_likelihood: BaseLikelihood,
log_prior: BasePrior,
x0=None,
cov0=None,
num_beta: int = 30,
iterations: Optional[int] = None,
):
self._log_likelihood = log_likelihood
self._log_prior = log_prior


# Set defaults for x0, cov0
self._x0 = (
x0 if x0 is not None else log_likelihood.parameters.initial_value()
) # Needs transformation
self._cov0 = (
cov0 if cov0 is not None else np.eye(log_likelihood.n_parameters) * 0.1
)

# Total number of iterations
self._iterations = (
iterations if iterations is not None else log_likelihood.n_parameters * 1000
)

# Number of beta divisions to consider, with
# 0 = beta_0 < beta_1 < ... < beta_{n-1} = 1
self._num_beta = 30


self.set_num_beta(num_beta)


@property
def iterations(self) -> int:
"""Returns the total number of iterations."""
return self._iterations


@iterations.setter
def iterations(self, value: int) -> None:
"""Sets the total number of iterations."""
if not isinstance(value, (int, np.integer)):
raise TypeError("iterations must be an integer")
if value <= 0:
raise ValueError("iterations must be positive")
self._iterations = int(value)


@property
def num_beta(self) -> int:
"""Returns the number of beta points"""
return self._num_beta


def set_num_beta(self, num_beta: int) -> None:
"""Sets the number of beta point values."""
if not isinstance(num_beta, (int, np.integer)):
raise TypeError("num_beta must be an integer")
if num_beta <= 1:
raise ValueError("num_beta must be greater than 1")
self._num_beta = num_beta
self._beta = np.linspace(0, 1, num_beta)


def run(self) -> tuple[float, float, float, float]:
"""
Run the annealed importance sampling algorithm.

Returns:
Tuple containing (mean, median, std, variance) of the log weights

Raises:
ValueError: If starting position has non-finite log-likelihood
"""
log_w = np.zeros(self._iterations)
current = self._x0.copy()


for i in range(self._iterations):
# Evaluate the starting state once and abort if it is not finite
log_likelihood_current = self._log_likelihood(current)
if not np.isfinite(log_likelihood_current):
raise ValueError("Starting position has non-finite log-likelihood.")

log_prior_current = self._log_prior(current)
current_f = log_prior_current + self._beta[0] * log_likelihood_current


log_density_current = np.zeros(self._num_beta)
log_density_previous = np.zeros(self._num_beta)
log_density_previous[0] = current_f

Contributor comment:
same here, log_density_previous[0] should be f_{1}(x_0) from eqn 11
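
For reference, my reading of the weight in Neal (1998), with f_n the prior, f_0 the unnormalised posterior, and each x_{j-1} produced from x_j by a transition that leaves f_j invariant:

$$
w = \frac{f_{n-1}(x_{n-1})}{f_n(x_{n-1})} \cdot \frac{f_{n-2}(x_{n-2})}{f_{n-1}(x_{n-2})} \cdots \frac{f_1(x_1)}{f_2(x_1)} \cdot \frac{f_0(x_0)}{f_1(x_0)},
$$

so the denominator terms are the f_{j+1}(x_j) factors, of which f_1(x_0) is the last, which is the term the comment refers to.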


# Main sampling loop
for j in range(1, self._num_beta):
proposed = np.random.multivariate_normal(current, self._cov0)


# Evaluate proposed state
log_likelihood_proposed = self._log_likelihood(proposed)
log_prior_proposed = self._log_prior(proposed)


# Store the current annealed density
log_density_current[j - 1] = current_f


# Metropolis sampling step
if np.isfinite(log_likelihood_proposed):
proposed_f = (
log_prior_proposed + self._beta[j] * log_likelihood_proposed
)
acceptance_log_prob = proposed_f - current_f

Contributor comment:
for the Gaussian test case they use in the paper, a much more complicated transition T_j is used: a sequence of 3 Metropolis tests repeated 5-10 times. I'm not sure if all that is necessary, however
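
A rough sketch of what a richer transition could look like (hypothetical helper, not part of this PR; log_target stands in for log_prior(x) + beta_j * log_likelihood(x)):

import numpy as np


def metropolis_transition(x, log_target, cov, n_steps=5, rng=None):
    """Apply several Metropolis updates that leave log_target invariant."""
    rng = np.random.default_rng() if rng is None else rng
    log_f_x = log_target(x)
    for _ in range(n_steps):
        proposal = rng.multivariate_normal(x, cov)
        log_f_prop = log_target(proposal)
        # Accept with probability min(1, f(proposal) / f(x))
        if np.log(rng.uniform()) < log_f_prop - log_f_x:
            x, log_f_x = proposal, log_f_prop
    return x, log_f_x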


if np.log(np.random.rand()) < acceptance_log_prob:
current = proposed
current_f = proposed_f


log_density_previous[j] = current_f


# Final state
log_density_current[self._num_beta - 1] = self._log_prior(
current
) + self._log_likelihood(current)
log_w[i] = np.sum(log_density_current) - np.sum(log_density_previous)


# Filter out zeros and return moments of generated chain
log_w = log_w[log_w != 0.0]
return np.mean(log_w), np.median(log_w), np.std(log_w), np.var(log_w)
