refactor: clean-up sa cooling attrs, update hypers in integration test
BradyPlanden committed Jan 23, 2025
1 parent 3677062 commit d08829d
Showing 2 changed files with 9 additions and 6 deletions.
pybop/optimisers/_simulated_annealing.py (8 changes: 5 additions & 3 deletions)
@@ -68,7 +68,7 @@ def ask(self):
         to evaluate from the optimiser.
         """
         # Update temperature
-        self._temp = self._initial_temp * (self._temp_decay**self._iterations)
+        self._temp *= self._temp_decay

         # Generate new point with random perturbation
         step = np.random.normal(0, self._sigma0, size=len(self._current))
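The temperature schedule itself is unchanged: with a fixed cooling rate, multiplying the current temperature by `_temp_decay` each call gives the same geometric decay as recomputing `initial_temp * decay**k`, but `ask()` no longer needs the initial temperature or the iteration counter, which fits the attribute clean-up named in the commit title. A minimal standalone sketch of the equivalence (plain NumPy with illustrative values, not PyBOP's class):

```python
import numpy as np

# Geometric cooling: closed form vs. the in-place update now used in ask().
# `initial_temp`, `alpha` and `n_iters` are illustrative, not library defaults.
initial_temp, alpha, n_iters = 1.0, 0.8, 5

closed_form = [initial_temp * alpha**k for k in range(1, n_iters + 1)]

temp, in_place = initial_temp, []
for _ in range(n_iters):
    temp *= alpha  # the new update
    in_place.append(temp)

assert np.allclose(closed_form, in_place)
```

A side effect of the in-place form is that reassigning `cooling_rate` mid-run continues the decay from the current temperature instead of jumping to `initial_temp * alpha**k` under the new rate.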
@@ -105,7 +105,7 @@ def tell(self, reply):
         if fx < self._current_f:
             accept = True
         else:
-            p = np.exp(-(fx - self._current_f) / self._temp)
+            p = np.exp(-(fx - self._current_f) / (np.finfo(float).eps + self._temp))
             accept = np.random.random() < p

         if accept:
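This is the standard Metropolis acceptance rule: an improving candidate is always accepted, and a worse one is accepted with probability p = exp(-(f_new - f_current) / T). Because the multiplicative schedule above lets the temperature decay arbitrarily close to zero, adding machine epsilon to the denominator presumably guards the division when T underflows; for worse candidates p then simply collapses to zero. A small standalone sketch of the guarded rule (a hypothetical helper function, not PyBOP's API):

```python
import numpy as np

def accept_candidate(f_new, f_current, temp):
    """Metropolis acceptance with the epsilon-guarded temperature."""
    if f_new < f_current:
        return True  # always accept an improvement
    # eps keeps the division finite even when the temperature has decayed to ~0
    p = np.exp(-(f_new - f_current) / (np.finfo(float).eps + temp))
    return np.random.random() < p

print(accept_candidate(0.9, 1.0, temp=1e-300))  # True: improvement
print(accept_candidate(1.2, 1.0, temp=1e-300))  # almost surely False: p ~ 0
```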
@@ -168,7 +168,9 @@ def cooling_rate(self, alpha):
         """
         Sets the cooling rate for the temperature schedule.
         """
-        if not isinstance(alpha, (int, float)) or not 0 < alpha < 1:
+        if not isinstance(alpha, (int, float)):
+            raise TypeError("Cooling rate must be a number")
+        if not 0 < alpha < 1:
             raise ValueError("Cooling rate must be between 0 and 1")
         self._temp_decay = float(alpha)

(Codecov / codecov/patch: added lines 172 and 174 in pybop/optimisers/_simulated_annealing.py were not covered by tests.)
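Splitting the combined check changes the exception types: a non-numeric argument now raises TypeError, while a numeric value outside (0, 1) still raises ValueError; the Codecov note above records that some of the added lines are not yet exercised by unit tests. A minimal standalone sketch of the resulting setter behaviour (a toy property mirroring the diff, not PyBOP's optimiser class):

```python
# Toy class mirroring the validation added in the diff (illustrative only).
class _CoolingSchedule:
    @property
    def cooling_rate(self):
        return self._temp_decay

    @cooling_rate.setter
    def cooling_rate(self, alpha):
        if not isinstance(alpha, (int, float)):
            raise TypeError("Cooling rate must be a number")
        if not 0 < alpha < 1:
            raise ValueError("Cooling rate must be between 0 and 1")
        self._temp_decay = float(alpha)

schedule = _CoolingSchedule()
schedule.cooling_rate = 0.8       # accepted
# schedule.cooling_rate = "fast"  # would raise TypeError
# schedule.cooling_rate = 1.5     # would raise ValueError
```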

tests/integration/test_spm_parameterisations.py (7 changes: 4 additions & 3 deletions)
@@ -124,7 +124,7 @@ def optim(self, optimiser, model, parameters, cost, init_soc):
             "cost": cost,
             "max_iterations": 450,
             "absolute_tolerance": 1e-6,
-            "max_unchanged_iterations": 450
+            "max_unchanged_iterations": 400 # 500
             if optimiser is pybop.SimulatedAnnealing
             else 55,
             "sigma0": [0.05, 0.05, 1e-3]
@@ -147,9 +147,10 @@ def optim(self, optimiser, model, parameters, cost, init_soc):
         optim = optimiser(**common_args)

         if isinstance(optim, pybop.SimulatedAnnealing):
-            optim.optimiser.sigma0 = [0.125, 0.125]
+            optim.optimiser.sigma0 = [0.1, 0.1]
+            optim.optimiser.cooling_rate = 0.8 # Cool quickly
             if isinstance(cost, pybop.GaussianLogLikelihood):
-                optim.optimiser.sigma0.extend([1e-3])
+                optim.optimiser.sigma0.extend([3e-3])

         return optim
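On the integration-test side, the simulated-annealing hyperparameters are retuned: the per-parameter step size `sigma0` drops from 0.125 to 0.1 (while the step for the extra parameter added under GaussianLogLikelihood rises from 1e-3 to 3e-3), an explicit `cooling_rate` of 0.8 is set so the temperature falls quickly, and `max_unchanged_iterations` in `common_args` above moves from 450 to 400. A rough feel for how fast a cooling rate of 0.8 shuts off uphill moves (plain arithmetic; the starting temperature of 1.0 is an assumption, the test does not set it):

```python
# Temperature after k iterations of geometric cooling at rate 0.8,
# starting from an assumed temperature of 1.0.
temp = 1.0
for k in range(1, 51):
    temp *= 0.8
    if k in (10, 25, 50):
        print(f"iteration {k:2d}: temperature ~ {temp:.3g}")
# iteration 10: temperature ~ 0.107
# iteration 25: temperature ~ 0.00378
# iteration 50: temperature ~ 1.43e-05
```

Within a small fraction of the 450-iteration cap the acceptance probability for worse candidates is effectively zero, so the run behaves essentially as a greedy local search well before the unchanged-iteration budget comes into play.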

