Skip to content

Commit

Permalink
test: move SciPyMinimize method tests to integration, refactors thevenin integration parametrisation, set sigma0 with BaseOptimiser default
Browse files Browse the repository at this point in the history
  • Loading branch information
BradyPlanden committed Oct 4, 2024
1 parent a966997 commit c4c3b9c
Show file tree
Hide file tree
Showing 4 changed files with 29 additions and 31 deletions.
2 changes: 1 addition & 1 deletion tests/integration/test_eis_parameterisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,7 +128,7 @@ def optim(self, optimiser, model, parameters, cost, init_soc):
"max_unchanged_iterations": 35,
"sigma0": [0.05, 0.05, 1e-3]
if isinstance(cost, pybop.GaussianLogLikelihood)
else 0.05,
else 0.02,
}

if isinstance(cost, pybop.LogPosterior):
Expand Down
4 changes: 2 additions & 2 deletions tests/integration/test_spm_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def optim(self, optimiser, model, parameters, cost, init_soc):
"max_unchanged_iterations": 55,
"sigma0": [0.05, 0.05, 1e-3]
if isinstance(cost, pybop.GaussianLogLikelihood)
else 0.05,
else 0.02,
}

if isinstance(cost, pybop.LogPosterior):
Expand Down Expand Up @@ -217,7 +217,7 @@ def test_multiple_signals(self, multi_optimiser, spm_two_signal_cost):
"max_unchanged_iterations": 55,
"sigma0": [0.035, 0.035, 6e-3, 6e-3]
if isinstance(spm_two_signal_cost, pybop.GaussianLogLikelihood)
else None,
else 0.02,
}

# Test each optimiser
Expand Down
46 changes: 25 additions & 21 deletions tests/integration/test_thevenin_parameterisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,45 +47,46 @@ def parameters(self):
),
)

@pytest.fixture(params=[pybop.RootMeanSquaredError, pybop.SumSquaredError])
def cost_class(self, request):
return request.param

@pytest.fixture
def cost(self, model, parameters, cost_class):
def dataset(self, model):
# Form dataset
solution = self.get_data(model)
dataset = pybop.Dataset(
return pybop.Dataset(
{
"Time [s]": solution["Time [s]"].data,
"Current function [A]": solution["Current [A]"].data,
"Voltage [V]": solution["Voltage [V]"].data,
}
)

# Define the cost to optimise
problem = pybop.FittingProblem(model, parameters, dataset)
return cost_class(problem)

@pytest.mark.parametrize(
"optimiser",
[pybop.SciPyMinimize, pybop.GradientDescent, pybop.PSO],
"cost_class", [pybop.RootMeanSquaredError, pybop.SumSquaredError]
)
@pytest.mark.parametrize(
"optimiser, method",
[
(pybop.SciPyMinimize, "trust-constr"),
(pybop.SciPyMinimize, "SLSQP"),
(pybop.SciPyMinimize, "COBYLA"),
(pybop.GradientDescent, ""),
(pybop.PSO, ""),
],
)
@pytest.mark.integration
def test_optimisers_on_simple_model(self, optimiser, cost):
def test_optimisers_on_simple_model(
self, model, parameters, dataset, cost_class, optimiser, method
):
# Define the cost to optimise
problem = pybop.FittingProblem(model, parameters, dataset)
cost = cost_class(problem)

x0 = cost.parameters.initial_value()
if optimiser in [pybop.GradientDescent]:
optim = optimiser(
cost=cost,
sigma0=2.5e-4,
max_iterations=250,
cost=cost, sigma0=2.5e-4, max_iterations=250, method=method
)
else:
optim = optimiser(
cost=cost,
sigma0=0.03,
max_iterations=250,
)
optim = optimiser(cost=cost, sigma0=0.03, max_iterations=250, method=method)
if isinstance(optimiser, pybop.BasePintsOptimiser):
optim.set_max_unchanged_iterations(iterations=35, absolute_tolerance=1e-5)

Expand All @@ -102,6 +103,9 @@ def test_optimisers_on_simple_model(self, optimiser, cost):
raise ValueError("Initial value is the same as the ground truth value.")
np.testing.assert_allclose(results.x, self.ground_truth, atol=1.5e-2)

if isinstance(optimiser, pybop.SciPyMinimize):
assert results.scipy_result.success is True

def get_data(self, model):
experiment = pybop.Experiment(
[
Expand Down
8 changes: 1 addition & 7 deletions tests/unit/test_optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -276,15 +276,9 @@ def test_cuckoo_no_bounds(self, dataset, cost, model):
@pytest.mark.unit
def test_scipy_minimize_with_jac(self, cost):
# Check a method that uses gradient information
optim = pybop.SciPyMinimize(cost=cost, method="L-BFGS-B", jac=True, maxiter=10)
optim = pybop.SciPyMinimize(cost=cost, method="L-BFGS-B", jac=True, maxiter=1)
results = optim.run()
assert results.get_scipy_result() == optim.result.scipy_result
assert optim.result.scipy_result.success is True
# Check constraint-based methods, which have different callbacks / returns
for method in ["trust-constr", "SLSQP", "COBYLA"]:
optim = pybop.SciPyMinimize(cost=cost, method=method, maxiter=10)
optim.run()
assert optim.result.scipy_result.success

with pytest.raises(
ValueError,
Expand Down

0 comments on commit c4c3b9c

Please sign in to comment.