Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Additions to #177 #198

Merged
merged 26 commits into from
Mar 19, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
Show all changes
26 commits
Select commit Hold shift + click to select a range
a00a2be
Update subplot widths, test_plotting dataset access, remove square br…
BradyPlanden Feb 16, 2024
89e2932
Add plotting support for notebook rendering, adds kaleido as dependency
BradyPlanden Feb 22, 2024
f853af9
+ diffevolution notebook
BradyPlanden Feb 22, 2024
fc06f1f
Merge branch '177-plotting-capabilities' into 177b-plotting-capabilities
BradyPlanden Feb 22, 2024
d7a71a7
fix missed deletion during merge
BradyPlanden Feb 22, 2024
4f5dbe6
Merge branch '177-plotting-capabilities' into 177b-plotting-capabilities
BradyPlanden Feb 23, 2024
ed2bf7c
Merge branch '177-plotting-capabilities' into 177b-plotting-capabilities
BradyPlanden Feb 23, 2024
4cf9108
Revamp model, problem, and cost object from numpy arrays to dictionar…
BradyPlanden Mar 1, 2024
3428c97
Fix ukf examples, temporarily limits ukf to signal output model
BradyPlanden Mar 1, 2024
43521da
default_variables to additional_variables w/ docstrings, updt. observ…
BradyPlanden Mar 2, 2024
67d2887
Fix integration test logic, add gradient landscape plots, pin pytest …
BradyPlanden Mar 4, 2024
b6a073b
Add tests for gradient plots, up coverage
BradyPlanden Mar 4, 2024
ee4cdff
Set default SciPyMinimize method to Nelder-Mead, clean-up repo
BradyPlanden Mar 4, 2024
66efaba
unicode fix for win notebooks, update prediction shape checks, remove…
BradyPlanden Mar 8, 2024
9b03734
Updt. cost2d/optim2d x0 shape/colour, revert conftest win platform un…
BradyPlanden Mar 13, 2024
e7aef79
Updt SciPy & BaseOptimiser for maximum iterations limit - fixes #237
BradyPlanden Mar 13, 2024
afd4990
add infeasible cost tests, remove redundant scipyminimise maxiter opt…
BradyPlanden Mar 13, 2024
a9ea84c
Merge pull request #224 from pybop-team/177c-plotting-capabilities
BradyPlanden Mar 13, 2024
05c7f20
Merge branch '177-plotting-capabilities' into 177b-plotting-capabilities
BradyPlanden Mar 14, 2024
db28440
Updt grad descent hypers for likelihood tests, add tol arg to scipy o…
BradyPlanden Mar 15, 2024
61d7d7a
Split kaleido dependency to avoid windows hang
BradyPlanden Mar 15, 2024
744d166
small refactors and cleanup
BradyPlanden Mar 15, 2024
c1b3854
Updt changelog
BradyPlanden Mar 15, 2024
716c671
updt coverage, bugfix sigma check/wrap
BradyPlanden Mar 19, 2024
41cf0f8
Merge branch '177-plotting-capabilities' into 177b-plotting-capabilities
BradyPlanden Mar 19, 2024
a479136
coverage, bugfix model.simulateS1
BradyPlanden Mar 19, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Updt grad descent hypers for likelihood tests, add tol arg to scipy o…
…ptimisers, pass optimiser final cost as is
  • Loading branch information
BradyPlanden committed Mar 15, 2024
commit db284402100e0b1bb8c3ed0d4b6dad98f849ed8f
8 changes: 4 additions & 4 deletions pybop/_optimisation.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,8 +156,6 @@ def run(self):
x, final_cost = self._run_pints()
elif not self.pints:
x, final_cost = self._run_pybop()
if not self._minimising:
final_cost = -final_cost

# Store the optimised parameters
if self.cost.problem is not None:
Expand Down Expand Up @@ -374,8 +372,10 @@ def _run_pints(self):
# Store the optimised parameters
self.store_optimised_parameters(x)

# Return best position and score
return x, f if self._minimising else -f
# Return best position and the score used internally,
# i.e the negative log-likelihood in the case of
# self._minimising = False
return x, f

def f_guessed_tracking(self):
"""
Expand Down
2 changes: 1 addition & 1 deletion pybop/costs/_likelihoods.py
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,9 @@
y, dy = self.problem.evaluateS1(x)
for key in self.signal:
if len(y.get(key, [])) != len(self._target.get(key, [])):
likelihood = np.float64(np.inf)
dl = self._de * np.ones(self.n_parameters)
dl = self._dl * np.ones(self.n_parameters)
return -likelihood, -dl

Check warning on line 100 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L98-L100

Added lines #L98 - L100 were not covered by tests

r = np.array([self._target[signal] - y[signal] for signal in self.signal])

Expand Down Expand Up @@ -150,7 +150,7 @@

for key in self.signal:
if len(prediction.get(key, [])) != len(self._target.get(key, [])):
return -np.float64(np.inf) # prediction doesn't match target

Check warning on line 153 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L153

Added line #L153 was not covered by tests

e = np.array(
[
Expand All @@ -167,7 +167,7 @@
if self.n_outputs == 1:
return e.item()
else:
return np.sum(e)

Check warning on line 170 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L170

Added line #L170 was not covered by tests

def _evaluateS1(self, x, grad=None):
"""
Expand All @@ -176,14 +176,14 @@
"""
sigma = np.asarray(x[-self.n_outputs :])
if np.any(sigma <= 0):
return -np.float64(np.inf), self._de * np.ones(self.n_parameters)

Check warning on line 179 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L179

Added line #L179 was not covered by tests

y, dy = self.problem.evaluateS1(x[: -self.n_outputs])
for key in self.signal:
if len(y.get(key, [])) != len(self._target.get(key, [])):
likelihood = np.float64(np.inf)
dl = self._de * np.ones(self.n_parameters)
return -likelihood, -dl

Check warning on line 186 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L184-L186

Added lines #L184 - L186 were not covered by tests

r = np.array([self._target[signal] - y[signal] for signal in self.signal])

Expand All @@ -196,9 +196,9 @@
dl = np.concatenate((dl, dsigma))
return likelihood, dl
else:
r = r.reshape(self.n_outputs, self.problem.n_time_data)

Check warning on line 199 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L199

Added line #L199 was not covered by tests
likelihood = self._evaluate(x)
dl = sigma ** (-2.0) * np.sum((r[:, :, np.newaxis] * dy), axis=1)
dsigma = -self._n_times / sigma + sigma**-(3.0) * np.sum(r**2, axis=0)

Check warning on line 202 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L201-L202

Added lines #L201 - L202 were not covered by tests
dl = np.concatenate((dl, dsigma))
return likelihood, np.sum(dl, axis=1)

Check warning on line 204 in pybop/costs/_likelihoods.py

View check run for this annotation

Codecov / codecov/patch

pybop/costs/_likelihoods.py#L204

Added line #L204 was not covered by tests
10 changes: 8 additions & 2 deletions pybop/optimisers/scipy_optimisers.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,11 @@ class SciPyMinimize(BaseOptimiser):
Maximum number of iterations to perform.
"""

def __init__(self, method=None, bounds=None, maxiter=None):
def __init__(self, method=None, bounds=None, maxiter=None, tol=1e-5):
super().__init__()
self.method = method
self.bounds = bounds
self.tol = tol
self.options = {}
self._max_iterations = maxiter

Expand Down Expand Up @@ -79,6 +80,7 @@ def cost_wrapper(x):
x0,
method=self.method,
bounds=bounds,
tol=self.tol,
options=self.options,
callback=callback,
)
Expand Down Expand Up @@ -126,8 +128,11 @@ class SciPyDifferentialEvolution(BaseOptimiser):
The number of individuals in the population. Defaults to 15.
"""

def __init__(self, bounds=None, strategy="best1bin", maxiter=1000, popsize=15):
def __init__(
self, bounds=None, strategy="best1bin", maxiter=1000, popsize=15, tol=1e-5
):
super().__init__()
self.tol = tol
self.strategy = strategy
self._max_iterations = maxiter
self._population_size = popsize
Expand Down Expand Up @@ -178,6 +183,7 @@ def callback(x, convergence):
strategy=self.strategy,
maxiter=self._max_iterations,
popsize=self._population_size,
tol=self.tol,
callback=callback,
)

Expand Down
12 changes: 8 additions & 4 deletions tests/integration/test_parameterisations.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def spm_costs(self, model, parameters, cost_class, init_soc):
model, parameters, dataset, signal=signal, init_soc=init_soc
)
if cost_class in [pybop.GaussianLogLikelihoodKnownSigma]:
return cost_class(problem, sigma=[0.05, 0.05])
return cost_class(problem, sigma=[0.03, 0.03])
else:
return cost_class(problem)

Expand Down Expand Up @@ -123,7 +123,11 @@ def test_spm_optimisers(self, optimiser, spm_costs):
assert parameterisation._max_iterations == 125

elif optimiser in [pybop.GradientDescent]:
parameterisation.optimiser.set_learning_rate(0.02)
if isinstance(spm_costs, pybop.GaussianLogLikelihoodKnownSigma):
parameterisation.optimiser.set_learning_rate(1.8e-5)
parameterisation.set_min_iterations(150)
else:
parameterisation.optimiser.set_learning_rate(0.02)
parameterisation.set_max_iterations(150)
x, final_cost = parameterisation.run()

Expand Down Expand Up @@ -196,9 +200,9 @@ def test_multiple_signals(self, multi_optimiser, spm_two_signal_cost):

# Test each optimiser
parameterisation = pybop.Optimisation(
cost=spm_two_signal_cost, optimiser=multi_optimiser, sigma0=0.05
cost=spm_two_signal_cost, optimiser=multi_optimiser, sigma0=0.03
)
parameterisation.set_max_unchanged_iterations(iterations=15, threshold=5e-4)
parameterisation.set_max_unchanged_iterations(iterations=35, threshold=5e-4)
parameterisation.set_max_iterations(125)
initial_cost = parameterisation.cost(spm_two_signal_cost.x0)

Expand Down
Loading