Adds Transformation class #357

Merged
merged 25 commits on Aug 1, 2024

Changes from 19 commits

Commits (25)
403a280
feat: initial transformation class
BradyPlanden May 30, 2024
433a108
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jun 12, 2024
a0a48f0
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jun 17, 2024
8530224
feat: Integrate transformations into Parameters API, adds log, scaled…
BradyPlanden Jun 18, 2024
4343f9a
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jun 18, 2024
7a12d6b
tests: add tests, remove redundant methods, add catch on x0==ground_t…
BradyPlanden Jun 21, 2024
9a4b34b
fix: add _verify_inputs, increase coverage, add temp fix to GaussianL…
BradyPlanden Jun 22, 2024
aab8995
docs: update docstrings
BradyPlanden Jun 22, 2024
feae89f
fix: add catch for transformation == None, updt tests for arg rename
BradyPlanden Jun 23, 2024
6c697ef
fix: old args for ScaledTransformation
BradyPlanden Jun 24, 2024
4e1f845
tests: add ComposedTransformation unit tests, increase coverage
BradyPlanden Jul 3, 2024
cf0df67
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jul 9, 2024
d12173d
fix: leftover merge items
BradyPlanden Jul 9, 2024
87dae1c
tests: increase coverage, add condition on prior bounds creation, bug…
BradyPlanden Jul 10, 2024
6a72568
Apply suggestions from code review
BradyPlanden Jul 10, 2024
4cec9fa
refactor: remove general transformation methods for ComposedTransform…
BradyPlanden Jul 10, 2024
bae5e3e
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jul 10, 2024
0e07f2e
Merge branch 'develop' into 92-transformations-for-varying-scale-para…
BradyPlanden Jul 11, 2024
77a9f41
fix: apply ruff linting
BradyPlanden Jul 11, 2024
105bda2
fix: review suggestions.
BradyPlanden Jul 31, 2024
53d544c
fix: remainder suggestions.
BradyPlanden Jul 31, 2024
9f48b63
Merge branch 'refs/heads/develop' into 92-transformations-for-varying…
BradyPlanden Jul 31, 2024
0a5216c
tests: update transformation integration parameters, remove sigma0 va…
BradyPlanden Jul 31, 2024
54c0afe
Apply suggestions from review
BradyPlanden Aug 1, 2024
8dca6d1
Update file names, add changelog entry
BradyPlanden Aug 1, 2024
2 changes: 2 additions & 0 deletions examples/scripts/spm_CMAES.py
@@ -13,12 +13,14 @@
         prior=pybop.Gaussian(6e-06, 0.1e-6),
         bounds=[1e-6, 9e-6],
         true_value=parameter_set["Negative particle radius [m]"],
+        transformation=pybop.LogTransformation(),
     ),
     pybop.Parameter(
         "Positive particle radius [m]",
         prior=pybop.Gaussian(4.5e-06, 0.1e-6),
         bounds=[1e-6, 9e-6],
         true_value=parameter_set["Positive particle radius [m]"],
+        transformation=pybop.LogTransformation(),
     ),
 )

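The two added `transformation=pybop.LogTransformation()` arguments put the particle-radius search into log-space, which suits parameters whose plausible values span orders of magnitude. A minimal sketch of the idea, assuming the search space is the natural log of the model space and that `to_model` (the only method this diff shows) is the inverse map:

```python
import numpy as np
import pybop

log_transform = pybop.LogTransformation()

# Assumed convention: a radius of 6e-6 m sits at log(6e-6) ≈ -12.02 in the
# transformed search space where the optimiser proposes candidates ...
x_search = np.array([np.log(6e-6)])

# ... and to_model maps a search-space point back to the physical scale,
# so every candidate radius the model sees is strictly positive.
x_model = log_transform.to_model(x_search)
print(x_model)  # expected ≈ [6e-6]
```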
4 changes: 2 additions & 2 deletions examples/standalone/cost.py
@@ -21,7 +21,7 @@ class StandaloneCost(pybop.BaseCost):

     Methods
     -------
-    __call__(x, grad=None)
+    __call__(x)
         Calculate the cost for a given parameter value.
     """

@@ -43,7 +43,7 @@ def __init__(self, problem=None):
         )
         self.x0 = self.parameters.initial_value()

-    def _evaluate(self, inputs, grad=None):
+    def _evaluate(self, inputs):
         """
         Calculate the cost for a given parameter value.
11 changes: 11 additions & 0 deletions pybop/__init__.py
@@ -78,6 +78,17 @@
 from .problems.fitting_problem import FittingProblem
 from .problems.design_problem import DesignProblem

+#
+# Transformation classes
+#
+from .transformation import Transformation
+from .transformation._transformation import (
+    IdentityTransformation,
+    ScaledTransformation,
+    LogTransformation,
+    ComposedTransformation,
+)

 #
 # Cost function class
 #
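Alongside the base `Transformation` class, the concrete classes exported here are the building blocks for per-parameter search-space maps. A hedged sketch of how they combine, assuming `ComposedTransformation` applies its i-th transformation to the i-th entry of the parameter vector (the way `construct_transformation` in `base_cost.py` below uses it):

```python
import numpy as np
import pybop

# First parameter searched on its native scale, second in log-space.
composed = pybop.ComposedTransformation(
    [pybop.IdentityTransformation(), pybop.LogTransformation()]
)

# Assumed element-wise behaviour of to_model: entry 0 passes through,
# entry 1 is exponentiated back to the model scale.
x_search = np.array([0.5, np.log(6e-6)])
x_model = composed.to_model(x_search)  # expected ≈ [0.5, 6e-6]
```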
9 changes: 3 additions & 6 deletions pybop/costs/_likelihoods.py
@@ -40,7 +40,7 @@ def __init__(self, problem: BaseProblem, sigma0: Union[list[float], float]):
         self._multip = -1 / (2.0 * self.sigma2)
         self._dl = np.ones(self.n_parameters)

-    def _evaluate(self, inputs: Inputs, grad: Union[None, np.ndarray] = None) -> float:
+    def _evaluate(self, inputs: Inputs) -> float:
         """
         Evaluates the Gaussian log-likelihood for the given parameters with known sigma.
         """
@@ -173,7 +173,7 @@ def dsigma_scale(self, new_value):
raise ValueError("dsigma_scale must be non-negative")
self._dsigma_scale = new_value

def _evaluate(self, inputs: Inputs, grad: Union[None, np.ndarray] = None) -> float:
def _evaluate(self, inputs: Inputs) -> float:
"""
Evaluates the Gaussian log-likelihood for the given parameters.

@@ -285,17 +285,14 @@ def __init__(self, problem, likelihood, sigma0=None, gradient_step=1e-3):
         ):
             raise ValueError(f"{self.likelihood} must be a subclass of BaseLikelihood")

-    def _evaluate(self, inputs: Inputs, grad=None) -> float:
+    def _evaluate(self, inputs: Inputs) -> float:
         """
         Calculate the maximum a posteriori cost for a given set of parameters.

         Parameters
         ----------
         inputs : Inputs
             The parameters for which to evaluate the cost.
-        grad : array-like, optional
-            An array to store the gradient of the cost function with respect
-            to the parameters.

         Returns
         -------
49 changes: 33 additions & 16 deletions pybop/costs/base_cost.py
@@ -1,4 +1,4 @@
-from pybop import BaseProblem
+from pybop import BaseProblem, ComposedTransformation, IdentityTransformation
 from pybop.parameters.parameter import Inputs, Parameters


@@ -24,34 +24,46 @@ class BaseCost:

     def __init__(self, problem=None):
         self.parameters = Parameters()
+        self.transformation = None
         self.problem = problem
         if isinstance(self.problem, BaseProblem):
             self._target = self.problem._target
             self.parameters.join(self.problem.parameters)
             self.n_outputs = self.problem.n_outputs
             self.signal = self.problem.signal
+            self.transformation = self.construct_transformation()

-    @property
-    def n_parameters(self):
-        return len(self.parameters)
+    def construct_transformation(self):
+        """
+        Create a ComposedTransformation object from the individual parameters transformations.
+        """
+        transformations = self.parameters.get_transformations()
+        if not transformations or all(t is None for t in transformations):
+            return None
+
+        valid_transformations = [
+            t if t is not None else IdentityTransformation() for t in transformations
+        ]
+        return ComposedTransformation(valid_transformations)

-    def __call__(self, x, grad=None):
+    def __call__(self, x):
         """
         Call the evaluate function for a given set of parameters.
         """
-        return self.evaluate(x, grad)
+        if self.transformation:
+            p = self.transformation.to_model(x)
+            return self.evaluate(p)
+        else:
+            return self.evaluate(x)

-    def evaluate(self, x, grad=None):
+    def evaluate(self, x):
         """
         Call the evaluate function for a given set of parameters.

         Parameters
         ----------
         x : array-like
             The parameters for which to evaluate the cost.
-        grad : array-like, optional
-            An array to store the gradient of the cost function with respect
-            to the parameters.

         Returns
         -------
@@ -66,15 +78,15 @@ def evaluate(self, x):
         inputs = self.parameters.verify(x)

         try:
-            return self._evaluate(inputs, grad)
+            return self._evaluate(inputs)

         except NotImplementedError as e:
             raise e

         except Exception as e:
             raise ValueError(f"Error in cost calculation: {e}") from e

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Calculate the cost function value for a given set of parameters.

@@ -84,9 +96,6 @@ def _evaluate(self, inputs: Inputs, grad=None):
         ----------
         inputs : Inputs
             The parameters for which to evaluate the cost.
-        grad : array-like, optional
-            An array to store the gradient of the cost function with respect
-            to the parameters.

         Returns
         -------
@@ -123,7 +132,11 @@ def evaluateS1(self, x):
         inputs = self.parameters.verify(x)

         try:
-            return self._evaluateS1(inputs)
+            if self.transformation:
+                p = self.transformation.to_model(inputs)
+                return self._evaluateS1(p)
+            else:
+                return self._evaluateS1(inputs)

         except NotImplementedError as e:
             raise e
@@ -152,3 +165,7 @@ def _evaluateS1(self, inputs: Inputs):
             If the method has not been implemented by the subclass.
         """
         raise NotImplementedError
+
+    @property
+    def n_parameters(self):
+        return len(self.parameters)
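The padding rule in `construct_transformation` is the key design choice: a cost carries no transformation at all unless at least one parameter defines one, and any undefined slots are filled with `IdentityTransformation` so the composed map stays index-aligned with the parameter vector. A minimal sketch of that rule restated outside the class, using only names that appear in this diff:

```python
import pybop

# What Parameters.get_transformations() might return: one log transform,
# one parameter left untransformed.
transformations = [pybop.LogTransformation(), None]

# No transformations defined at all -> the cost keeps transformation=None
# and the call path stays untouched.
if not transformations or all(t is None for t in transformations):
    composed = None
else:
    # Otherwise pad the gaps with the identity so index i of the composed
    # map still corresponds to parameter i.
    composed = pybop.ComposedTransformation(
        [t if t is not None else pybop.IdentityTransformation() for t in transformations]
    )
```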
12 changes: 3 additions & 9 deletions pybop/costs/design_costs.py
@@ -65,7 +65,7 @@ def update_simulation_data(self, inputs: Inputs):
         self.problem._target = {key: solution[key] for key in self.problem.signal}
         self.dt = solution["Time [s]"][1] - solution["Time [s]"][0]

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Computes the value of the cost function.

@@ -75,8 +75,6 @@ def _evaluate(self, inputs: Inputs, grad=None):
         ----------
         inputs : Inputs
             The parameters for which to compute the cost.
-        grad : array, optional
-            Gradient information, not used in this method.

         Raises
         ------
@@ -99,16 +97,14 @@ class GravimetricEnergyDensity(DesignCost):
     def __init__(self, problem, update_capacity=False):
         super().__init__(problem, update_capacity)

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Computes the cost function for the energy density.

         Parameters
         ----------
         inputs : Inputs
             The parameters for which to compute the cost.
-        grad : array, optional
-            Gradient information, not used in this method.

         Returns
         -------
@@ -155,16 +151,14 @@ class VolumetricEnergyDensity(DesignCost):
     def __init__(self, problem, update_capacity=False):
         super().__init__(problem, update_capacity)

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Computes the cost function for the energy density.

         Parameters
         ----------
         inputs : Inputs
             The parameters for which to compute the cost.
-        grad : array, optional
-            Gradient information, not used in this method.

         Returns
         -------
9 changes: 3 additions & 6 deletions pybop/costs/fitting_costs.py
@@ -23,7 +23,7 @@ def __init__(self, problem):
         # Default fail gradient
         self._de = 1.0

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Calculate the root mean square error for a given set of parameters.

@@ -136,17 +136,14 @@ def __init__(self, problem):
         # Default fail gradient
         self._de = 1.0

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Calculate the sum of squared errors for a given set of parameters.

         Parameters
         ----------
         inputs : Inputs
             The parameters for which to evaluate the cost.
-        grad : array-like, optional
-            An array to store the gradient of the cost function with respect
-            to the parameters.

         Returns
         -------
@@ -234,7 +231,7 @@ def __init__(self, observer: Observer):
         super().__init__(problem=observer)
         self._observer = observer

-    def _evaluate(self, inputs: Inputs, grad=None):
+    def _evaluate(self, inputs: Inputs):
         """
         Calculate the observer cost for a given set of parameters.

5 changes: 5 additions & 0 deletions pybop/optimisers/base_optimiser.py
@@ -58,13 +58,15 @@ def __init__(
         self.verbose = False
         self.log = dict(x=[], x_best=[], cost=[])
         self.minimising = True
+        self.transformation = None
         self.physical_viability = False
         self.allow_infeasible_solutions = False
         self.default_max_iterations = 1000
         self.result = None

         if isinstance(cost, BaseCost):
             self.cost = cost
+            self.transformation = self.cost.transformation
             self.parameters.join(cost.parameters)
             self.set_allow_infeasible_solutions()
             if isinstance(cost, (BaseLikelihood, DesignCost)):
@@ -131,6 +133,9 @@ def set_base_options(self):
         # Set other options
         self.verbose = self.unset_options.pop("verbose", self.verbose)
         self.minimising = self.unset_options.pop("minimising", self.minimising)
+        self.transformation = self.unset_options.pop(
+            "transformation", self.transformation
+        )
         if "allow_infeasible_solutions" in self.unset_options.keys():
             self.set_allow_infeasible_solutions(
                 self.unset_options.pop("allow_infeasible_solutions")
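With these changes an optimiser inherits `cost.transformation` at construction, while `set_base_options` lets a caller override it as a keyword option. A hedged sketch of both paths, reusing the `StandaloneCost` from this PR's examples and assuming keyword options reach `set_base_options` via the optimiser constructor (as `**optimiser_kwargs` in `base_pints_optimiser.py` suggests):

```python
import pybop
from examples.standalone.cost import StandaloneCost  # shipped in this PR

cost = StandaloneCost()

# Inherited path: optim.transformation is copied from cost.transformation.
optim = pybop.CMAES(cost)

# Override path (assumed): an explicit keyword replaces the inherited one,
# e.g. disabling the transformation entirely.
optim = pybop.CMAES(cost, transformation=None)
```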
15 changes: 6 additions & 9 deletions pybop/optimisers/base_pints_optimiser.py
@@ -48,9 +48,6 @@
         self._evaluations = None
         self._iterations = None

-        # PyBOP doesn't currently support the PINTS transformation class
-        self._transformation = None
-
         self.pints_optimiser = pints_optimiser
         super().__init__(cost, **optimiser_kwargs)

@@ -200,8 +197,8 @@
                 return (L, dl) if self.minimising else (-L, -dl)
         else:

-            def f(x, grad=None):
-                return self.cost(x, grad) if self.minimising else -self.cost(x, grad)
+            def f(x):
+                return self.cost(x) if self.minimising else -self.cost(x)

         # Create evaluator object
         if self._parallel:
@@ -325,8 +322,8 @@

         # Show current parameters
         x_user = self.pints_optimiser.x_guessed()
-        if self._transformation is not None:
-            x_user = self._transformation.to_model(x_user)
+        if self.transformation is not None:
+            x_user = self.transformation.to_model(x_user)
         for p in x_user:
             print(PintsStrFloat(p))
         print("-" * 40)

Codecov / codecov/patch: added line pybop/optimisers/base_pints_optimiser.py#L326 was not covered by tests.
@@ -348,8 +345,8 @@
         f = self.pints_optimiser.f_best()

         # Inverse transform search parameters
-        if self._transformation is not None:
-            x = self._transformation.to_model(x)
+        if self.transformation is not None:
+            x = self.transformation.to_model(x)

         return Result(
             x=x, final_cost=f if self.minimising else -f, n_iterations=self._iterations
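Since `self.transformation` now points at the object built by `BaseCost.construct_transformation`, the inverse map is applied exactly once at reporting time, so `Result.x` is always in model (physical) space. A short sketch of that final step, assuming the standard PINTS `x_best` accessor alongside the `f_best` call shown above:

```python
# The optimiser's best point lives in search space ...
x = optim.pints_optimiser.x_best()

# ... and is mapped back to the physical scale before being reported,
# mirroring the Result construction in this diff.
if optim.transformation is not None:
    x = optim.transformation.to_model(x)
```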