
Commit

Merge branch 'refs/heads/develop' into 410-bug-incorrect-axis-symbol-rendering-formu-in-notebooks

# Conflicts:
#	examples/notebooks/ecm_trust-constr.ipynb
BradyPlanden committed Sep 5, 2024
2 parents 09c9dfb + ab6eca5 commit 9d6d04d
Showing 21 changed files with 2,527 additions and 28 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -4,7 +4,7 @@ ci:

repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: "v0.6.2"
    rev: "v0.6.3"
    hooks:
      - id: ruff
        args: [--fix, --show-fixes]
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -2,6 +2,7 @@

## Features

- [#6](https://github.com/pybop-team/PyBOP/issues/6) - Adds Monte Carlo functionality, with methods based on Pints' algorithms. A base class, `BaseSampler`, is added along with `BasePintsSampler`.
- [#353](https://github.com/pybop-team/PyBOP/issues/353) - Allows user-defined check_params functions to enforce nonlinear constraints, and enables SciPy constrained optimisation methods.
- [#222](https://github.com/pybop-team/PyBOP/issues/222) - Adds an example for performing electrode balancing.
- [#441](https://github.com/pybop-team/PyBOP/issues/441) - Adds an example for estimating constants within a `pybamm.FunctionalParameter`.
@@ -38,7 +39,6 @@

## Bug Fixes


## Breaking Changes

# [v24.6](https://github.com/pybop-team/PyBOP/tree/v24.6) - 2024-07-08
14 changes: 8 additions & 6 deletions examples/scripts/ecm_tau_constraints.py
@@ -1,3 +1,5 @@
from typing import Union

import numpy as np

import pybop
@@ -57,21 +59,21 @@


def get_parameter_checker(
tau_mins: float | list[float],
tau_maxs: float | list[float],
fitted_rc_pair_indices: int | list[int],
tau_mins: Union[float, list[float]],
tau_maxs: Union[float, list[float]],
fitted_rc_pair_indices: Union[int, list[int]],
):
"""Returns a function to check parameters against given tau bounds.
The resulting check_params function will be sent off to PyBOP; the
rest of the code does some light checking of the constraints.
Parameters
----------
tau_mins: float | list[float]
tau_mins: float or list[float]
Lower bounds on timescale tau_i = Ri * Ci
tau_maxs: float | list[float]
tau_maxs: float or list[float]
Upper bounds on timescale tau_i = Ri * Ci
fitted_rc_pair_indices: int | list[float]
fitted_rc_pair_indices: int or list[int]
The index of each RC pair whose parameters are to be fitted.
E.g. [1, 2] means fitting R1, R2, C1, C2. The timescale of RC
pair fitted_rc_pair_indices[j] is constrained to be in the
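
For context, a minimal sketch of how such a tau-bound checker could be written as a closure. Only the signature and docstring come from the diff above; the body, the "R{i} [Ohm]" / "C{i} [F]" parameter-name convention, and the inputs-dict interface are illustrative assumptions, not the script's actual implementation.

from typing import Union


def get_parameter_checker(
    tau_mins: Union[float, list[float]],
    tau_maxs: Union[float, list[float]],
    fitted_rc_pair_indices: Union[int, list[int]],
):
    # Promote scalars so a single bound can apply to a single RC pair
    tau_mins = [tau_mins] if isinstance(tau_mins, (int, float)) else list(tau_mins)
    tau_maxs = [tau_maxs] if isinstance(tau_maxs, (int, float)) else list(tau_maxs)
    indices = (
        [fitted_rc_pair_indices]
        if isinstance(fitted_rc_pair_indices, int)
        else list(fitted_rc_pair_indices)
    )
    if not (len(tau_mins) == len(tau_maxs) == len(indices)):
        raise ValueError("Provide one (tau_min, tau_max) pair per fitted RC pair")

    def check_params(inputs: dict) -> bool:
        # Accept a candidate only if every fitted timescale R_i * C_i is within bounds
        if inputs is None:
            return True
        for i, lo, hi in zip(indices, tau_mins, tau_maxs):
            tau = inputs[f"R{i} [Ohm]"] * inputs[f"C{i} [F]"]
            if not lo <= tau <= hi:
                return False
        return True

    return check_params
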
94 changes: 94 additions & 0 deletions examples/scripts/mcmc_example.py
@@ -0,0 +1,94 @@
import numpy as np
import plotly.graph_objects as go
import pybamm

import pybop

# Parameter set and model definition
solver = pybamm.IDAKLUSolver()
parameter_set = pybop.ParameterSet.pybamm("Chen2020")
parameter_set.update(
    {
        "Negative electrode active material volume fraction": 0.63,
        "Positive electrode active material volume fraction": 0.71,
    }
)
synth_model = pybop.lithium_ion.DFN(parameter_set=parameter_set, solver=solver)

# Fitting parameters
parameters = pybop.Parameters(
    pybop.Parameter(
        "Negative electrode active material volume fraction",
        prior=pybop.Gaussian(0.68, 0.02),
        transformation=pybop.LogTransformation(),
    ),
    pybop.Parameter(
        "Positive electrode active material volume fraction",
        prior=pybop.Gaussian(0.65, 0.02),
        transformation=pybop.LogTransformation(),
    ),
)

# Generate data
init_soc = 0.5
sigma = 0.002
experiment = pybop.Experiment(
    [
        ("Discharge at 0.5C for 6 minutes (5 second period)",),
    ]
)
values = synth_model.predict(
    initial_state={"Initial SoC": init_soc}, experiment=experiment
)


def noise(sigma):
    return np.random.normal(0, sigma, len(values["Voltage [V]"].data))


# Form dataset
dataset = pybop.Dataset(
    {
        "Time [s]": values["Time [s]"].data,
        "Current function [A]": values["Current [A]"].data,
        "Voltage [V]": values["Voltage [V]"].data + noise(sigma),
        "Bulk open-circuit voltage [V]": values["Bulk open-circuit voltage [V]"].data
        + noise(sigma),
    }
)

model = pybop.lithium_ion.SPM(parameter_set=parameter_set, solver=pybamm.IDAKLUSolver())
model.build(initial_state={"Initial SoC": init_soc})
signal = ["Voltage [V]", "Bulk open-circuit voltage [V]"]

# Generate problem, likelihood, and sampler
problem = pybop.FittingProblem(model, parameters, dataset, signal=signal)
likelihood = pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=0.002)
posterior = pybop.LogPosterior(likelihood)

optim = pybop.DifferentialEvolutionMCMC(
    posterior,
    chains=3,
    max_iterations=300,
    warm_up=100,
    verbose=True,
    # parallel=True, # uncomment to enable parallelisation (MacOS/WSL/Linux only)
)
result = optim.run()

# Create a histogram
fig = go.Figure()
for _i, data in enumerate(result):
    fig.add_trace(go.Histogram(x=data[:, 0], name="Neg", opacity=0.75))
    fig.add_trace(go.Histogram(x=data[:, 1], name="Pos", opacity=0.75))

# Update layout for better visualization
fig.update_layout(
    title="Posterior distribution of volume fractions",
    xaxis_title="Value",
    yaxis_title="Count",
    barmode="overlay",
)

# Show the plot
fig.show()
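
As a brief follow-up, a small summary sketch, assuming (consistently with the histogram loop above) that result is a sequence of per-chain sample arrays of shape (samples, parameters), with columns ordered as in the Parameters definition.

import numpy as np

# Pool the chains and report the posterior mean and a 95% interval per parameter
samples = np.vstack(result)
names = [
    "Negative electrode active material volume fraction",
    "Positive electrode active material volume fraction",
]
for j, name in enumerate(names):
    mean = samples[:, j].mean()
    low, high = np.percentile(samples[:, j], [2.5, 97.5])
    print(f"{name}: mean {mean:.4f}, 95% interval [{low:.4f}, {high:.4f}]")
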
29 changes: 24 additions & 5 deletions pybop/__init__.py
@@ -71,7 +71,7 @@
#
from .parameters.parameter import Parameter, Parameters
from .parameters.parameter_set import ParameterSet
from .parameters.priors import BasePrior, Gaussian, Uniform, Exponential
from .parameters.priors import BasePrior, Gaussian, Uniform, Exponential, JointLogPrior

#
# Model classes
@@ -83,15 +83,15 @@
from .models.base_model import Inputs

#
# Problem class
# Problem classes
#
from .problems.base_problem import BaseProblem
from .problems.fitting_problem import FittingProblem
from .problems.multi_fitting_problem import MultiFittingProblem
from .problems.design_problem import DesignProblem

#
# Cost function class
# Cost classes
#
from .costs.base_cost import BaseCost
from .costs.fitting_costs import (
@@ -110,12 +110,13 @@
BaseLikelihood,
GaussianLogLikelihood,
GaussianLogLikelihoodKnownSigma,
LogPosterior,
MAP,
)
from .costs._weighted_cost import WeightedCost

#
# Optimiser class
# Optimiser classes
#

from .optimisers._cuckoo import CuckooSearchImpl
@@ -141,14 +142,32 @@
)
from .optimisers.optimisation import Optimisation

#
# Monte Carlo classes
#
from .samplers.base_sampler import BaseSampler
from .samplers.base_pints_sampler import BasePintsSampler
from .samplers.pints_samplers import (
NUTS, DREAM, AdaptiveCovarianceMCMC,
DifferentialEvolutionMCMC, DramACMC,
EmceeHammerMCMC,
HaarioACMC, HaarioBardenetACMC,
HamiltonianMCMC, MALAMCMC,
MetropolisRandomWalkMCMC, MonomialGammaHamiltonianMCMC,
PopulationMCMC, RaoBlackwellACMC,
RelativisticMCMC, SliceDoublingMCMC,
SliceRankShrinkingMCMC, SliceStepoutMCMC,
)
from .samplers.mcmc_sampler import MCMCSampler

#
# Observer classes
#
from .observers.unscented_kalman import UnscentedKalmanFilterObserver
from .observers.observer import Observer

#
# Plotting class
# Plotting classes
#
from .plotting.plotly_manager import PlotlyManager
from .plotting.standard_plots import StandardPlot, StandardSubplot, plot_trajectories
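
For orientation, a short sketch of the expanded public namespace; the class names are taken directly from the import blocks above, while the listing itself is illustrative only.

import pybop

# Sampler classes registered in pybop/__init__.py are available at package level
samplers = [
    pybop.NUTS,
    pybop.DifferentialEvolutionMCMC,
    pybop.HaarioBardenetACMC,
    pybop.SliceStepoutMCMC,
]
print([cls.__name__ for cls in samplers])

# The Bayesian building blocks re-exported above are accessible the same way
print(pybop.LogPosterior, pybop.JointLogPrior, pybop.MCMCSampler)
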
92 changes: 91 additions & 1 deletion pybop/costs/_likelihoods.py
@@ -4,7 +4,7 @@

from pybop.costs.base_cost import BaseCost
from pybop.parameters.parameter import Parameter, Parameters
from pybop.parameters.priors import Uniform
from pybop.parameters.priors import JointLogPrior, Uniform
from pybop.problems.base_problem import BaseProblem


@@ -290,3 +290,93 @@ def compute(
        log_likelihood = self.likelihood.compute(y)
        posterior = log_likelihood + log_prior
        return posterior


class LogPosterior(BaseCost):
    """
    The Log Posterior for a given problem.

    Computes the log posterior, which is the sum of the log-likelihood
    and the log prior.

    Inherits all parameters and attributes from ``BaseCost``.
    """

    def __init__(self, log_likelihood, log_prior=None):
        super().__init__(problem=log_likelihood.problem)

        # Store the likelihood and prior
        self._log_likelihood = log_likelihood
        if log_prior is None:
            self._prior = JointLogPrior(*log_likelihood.problem.parameters.priors())
        else:
            self._prior = log_prior

    def compute(
        self,
        y: dict,
        dy: np.ndarray = None,
        calculate_grad: bool = False,
    ) -> Union[float, tuple[float, np.ndarray]]:
        """
        Calculate the posterior cost for a given set of parameters.

        Parameters
        ----------
        y : dict
            The simulated model output to evaluate the cost against.
        dy : np.ndarray, optional
            The corresponding sensitivities, required when calculate_grad is True.
        calculate_grad : bool, optional
            If True, the gradient of the posterior is returned alongside the cost.

        Returns
        -------
        float
            The posterior cost, or a tuple of the cost and its gradient when
            calculate_grad is True.
        """
        # Verify we have dy if calculate_grad is True
        self.verify_args(dy, calculate_grad)

        if calculate_grad:
            log_prior, dp = self._prior.logpdfS1(self.parameters.current_value())
        else:
            log_prior = self._prior.logpdf(self.parameters.current_value())

        if not np.isfinite(log_prior).any():
            return (-np.inf, -self.grad_fail) if calculate_grad else -np.inf

        if calculate_grad:
            log_likelihood, dl = self._log_likelihood.compute(
                y, dy, calculate_grad=True
            )

            posterior = log_likelihood + log_prior
            total_gradient = dl + dp

            return posterior, total_gradient

        log_likelihood = self._log_likelihood.compute(y)
        posterior = log_likelihood + log_prior
        return posterior

    def prior(self):
        """
        Return the prior object.

        Returns
        -------
        object
            The prior object.
        """
        return self._prior

    def likelihood(self):
        """
        Returns the likelihood.

        Returns
        -------
        object
            The likelihood object.
        """
        return self._log_likelihood
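
As a usage note, a minimal sketch of constructing LogPosterior with the default and an explicit prior, mirroring the constructor above. It assumes a pybop.FittingProblem named problem built as in examples/scripts/mcmc_example.py from this commit, and the Uniform bounds are illustrative only; the returned cost is the log-likelihood plus the log prior, as computed by compute above.

import pybop

# `problem` is assumed to be a pybop.FittingProblem, e.g. as constructed in
# examples/scripts/mcmc_example.py earlier in this commit.
likelihood = pybop.GaussianLogLikelihoodKnownSigma(problem, sigma0=0.002)

# Default: the prior is a JointLogPrior assembled from the problem's parameter priors
posterior = pybop.LogPosterior(likelihood)

# Explicit prior: any log-prior object can be supplied; these bounds are illustrative
explicit_prior = pybop.JointLogPrior(
    pybop.Uniform(0.4, 0.9),
    pybop.Uniform(0.4, 0.9),
)
posterior = pybop.LogPosterior(likelihood, log_prior=explicit_prior)
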
2 changes: 1 addition & 1 deletion pybop/optimisers/base_optimiser.py
@@ -115,7 +115,7 @@ def __init__(
self.set_base_options()
self._set_up_optimiser()

# Throw an warning if any options remain
# Throw a warning if any options remain
if self.unset_options:
warnings.warn(
f"Unrecognised keyword arguments: {self.unset_options} will not be used.",