Skip to content

Commit

Permalink
tests: update GaussianLogLikelihood sigma0 values
Browse files Browse the repository at this point in the history
  • Loading branch information
BradyPlanden committed Jul 4, 2024
1 parent b209048 commit 1f33081
Showing 1 changed file with 4 additions and 6 deletions.
10 changes: 4 additions & 6 deletions tests/integration/test_spm_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
if cost_class in [pybop.GaussianLogLikelihoodKnownSigma]:
return cost_class(problem, sigma0=self.sigma0)
elif cost_class in [pybop.GaussianLogLikelihood]:
return cost_class(problem, sigma0=self.sigma0 * 2) # Initial sigma0 guess
return cost_class(problem, sigma0=self.sigma0 * 4) # Initial sigma0 guess
elif cost_class in [pybop.MAP]:
return cost_class(
problem, pybop.GaussianLogLikelihoodKnownSigma, sigma0=self.sigma0
Expand All @@ -98,9 +98,7 @@ def test_spm_optimisers(self, optimiser, spm_costs):
x0 = spm_costs.parameters.initial_value()
common_args = {
"cost": spm_costs,
"max_iterations": 125
if isinstance(spm_costs, pybop.GaussianLogLikelihood)
else 250,
"max_iterations": 250,
}

# Add sigma0 to ground truth for GaussianLogLikelihood
Expand All @@ -118,10 +116,10 @@ def test_spm_optimisers(self, optimiser, spm_costs):
optim.set_max_unchanged_iterations(iterations=45, absolute_tolerance=1e-5)

# AdamW will use lowest sigma0 for learning rate, so allow more iterations
if issubclass(optimiser, pybop.AdamW) and isinstance(
if issubclass(optimiser, (pybop.AdamW, pybop.IRPropMin)) and isinstance(
spm_costs, pybop.GaussianLogLikelihood
):
optim = optimiser(sigma0=0.0025, max_unchanged_iterations=75, **common_args)
optim = optimiser(max_unchanged_iterations=75, **common_args)

initial_cost = optim.cost(x0)
x, final_cost = optim.run()
Expand Down

0 comments on commit 1f33081

Please sign in to comment.