Removes deprecated Adam optimiser #598

Merged 1 commit on Dec 17, 2024
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -31,6 +31,7 @@

## Breaking Changes

- [#598](https://github.com/pybop-team/PyBOP/pull/598) - Deprecated `Adam` optimiser has been removed; see `AdamW` for a replacement.
- [#531](https://github.com/pybop-team/PyBOP/pull/531) - Plot methods moved to `pybop.plot` with mostly minimal renaming. For example, `pybop.plot_parameters` is now `pybop.plot.parameters`. Other breaking changes include `pybop.plot2d`, which is now `pybop.plot.contour`.
- [#526](https://github.com/pybop-team/PyBOP/pull/526) - Refactored the `OptimisationResults` classes, with `optim.run()` now returning the full object. Adds a finite cost value check for optimised parameters.

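A minimal migration sketch for the breaking changes listed above (illustrative only: `cost` is assumed to be an already constructed PyBOP cost object, and the plot call signatures are assumptions based on the package examples rather than anything shown in this diff):

```python
import pybop

# `cost` is assumed to be an existing PyBOP cost object; its construction from a
# model, dataset and parameters is omitted here.

# #598: the removed Adam optimiser is replaced one-for-one by AdamW.
optim = pybop.AdamW(cost, max_iterations=100)

# #526: optim.run() now returns the full results object.
results = optim.run()

# #531: plot methods now live under the pybop.plot namespace.
pybop.plot.parameters(optim)  # previously pybop.plot_parameters(optim)
pybop.plot.contour(optim)     # previously pybop.plot2d(optim)
```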
1 change: 0 additions & 1 deletion pybop/__init__.py
@@ -134,7 +134,6 @@
)
from .optimisers.pints_optimisers import (
GradientDescent,
Adam,
CMAES,
IRPropMin,
NelderMead,
5 changes: 2 additions & 3 deletions pybop/optimisers/base_pints_optimiser.py
@@ -2,7 +2,6 @@

import numpy as np
from pints import PSO as PintsPSO
from pints import Adam as PintsAdam
from pints import NelderMead as PintsNelderMead
from pints import Optimiser as PintsOptimiser
from pints import ParallelEvaluator as PintsParallelEvaluator
@@ -12,7 +11,7 @@
from pints import SequentialEvaluator as PintsSequentialEvaluator
from pints import strfloat as PintsStrFloat

from pybop import BaseOptimiser, GradientDescentImpl, OptimisationResult
from pybop import AdamWImpl, BaseOptimiser, GradientDescentImpl, OptimisationResult


class BasePintsOptimiser(BaseOptimiser):
@@ -143,7 +142,7 @@ def _sanitise_inputs(self):

# Convert bounds to PINTS boundaries
if self.bounds is not None:
ignored_optimisers = (GradientDescentImpl, PintsAdam, PintsNelderMead)
ignored_optimisers = (GradientDescentImpl, AdamWImpl, PintsNelderMead)
if issubclass(self._pints_optimiser, ignored_optimisers):
print(f"NOTE: Boundaries ignored by {self._pints_optimiser}")
self.bounds = None
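The bounds handling changed in this hunk can be exercised as in the sketch below; it mirrors the unit test further down and assumes `cost` is a previously constructed PyBOP cost:

```python
import pybop

# Bounds use the dict format described in the optimiser docstrings.
bounds = {"lower": [0.4, 0.4], "upper": [0.8, 0.8]}

# AdamWImpl is now in the ignored_optimisers tuple (replacing PintsAdam), so any
# bounds passed to AdamW are dropped with a console note instead of being
# converted to PINTS boundaries.
optim = pybop.AdamW(cost, bounds=bounds)
# prints: NOTE: Boundaries ignored by <optimiser class>
assert optim.bounds is None
```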
80 changes: 0 additions & 80 deletions pybop/optimisers/pints_optimisers.py
@@ -1,10 +1,7 @@
from warnings import warn

from pints import CMAES as PintsCMAES
from pints import PSO as PintsPSO
from pints import SNES as PintsSNES
from pints import XNES as PintsXNES
from pints import Adam as PintsAdam
from pints import IRPropMin as PintsIRPropMin
from pints import NelderMead as PintsNelderMead

@@ -87,83 +84,6 @@ def __init__(
)


class Adam(BasePintsOptimiser):
"""
Implements the Adam optimisation algorithm.

This class extends the Adam optimiser from the PINTS library, which combines
ideas from RMSProp and Stochastic Gradient Descent with momentum.

Note that this optimiser does not support boundary constraints.

Parameters
----------
cost : callable
The cost function to be minimized.
max_iterations : int, optional
Maximum number of iterations for the optimisation.
min_iterations : int, optional (default=2)
Minimum number of iterations before termination.
max_unchanged_iterations : int, optional (default=15)
Maximum number of iterations without improvement before termination.
multistart : int, optional (default=1)
Number of optimiser restarts from randomly sample position. These positions
are sampled from the priors.
parallel : bool, optional (default=False)
Whether to run the optimisation in parallel.
**optimiser_kwargs : optional
Valid PINTS option keys and their values, for example:
x0 : array_like
Initial position from which optimisation will start.
sigma0 : float
Initial step size or standard deviation depending on the optimiser.
bounds : dict
A dictionary with 'lower' and 'upper' keys containing arrays for lower and
upper bounds on the parameters.
use_f_guessed : bool
Whether to return the guessed function values.
absolute_tolerance : float
Absolute tolerance for convergence checking.
relative_tolerance : float
Relative tolerance for convergence checking.
max_evaluations : int
Maximum number of function evaluations.
threshold : float
Threshold value for early termination.

See Also
--------
pints.Adam : The PINTS implementation this class is based on.
"""

warn(
"Adam is deprecated and will be removed in a future release. Please use AdamW instead.",
DeprecationWarning,
stacklevel=2,
)

def __init__(
self,
cost,
max_iterations: int = None,
min_iterations: int = 2,
max_unchanged_iterations: int = 15,
multistart: int = 1,
parallel: bool = False,
**optimiser_kwargs,
):
super().__init__(
cost,
PintsAdam,
max_iterations,
min_iterations,
max_unchanged_iterations,
multistart,
parallel,
**optimiser_kwargs,
)


class AdamW(BasePintsOptimiser):
"""
Implements the AdamW optimisation algorithm in PyBOP.
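Since the removed `Adam` docstring points users to `AdamW`, a hedged usage sketch of `AdamW` with the same constructor options that docstring listed (values are illustrative, `cost` is assumed to exist, and the keyword names are taken from the removed docstring rather than from the `AdamW` class itself):

```python
import pybop

optim = pybop.AdamW(
    cost,                        # cost function to be minimised (assumed pre-built)
    max_iterations=250,          # termination options as documented for the removed Adam
    min_iterations=2,
    max_unchanged_iterations=15,
    multistart=1,
    parallel=False,
    sigma0=0.05,                 # PINTS option keys pass through **optimiser_kwargs
)
results = optim.run()
```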
4 changes: 1 addition & 3 deletions tests/unit/test_optimisation.py
@@ -76,7 +76,6 @@ def two_param_cost(self, model, two_parameters, dataset):
(pybop.SciPyMinimize, "SciPyMinimize", False),
(pybop.SciPyDifferentialEvolution, "SciPyDifferentialEvolution", False),
(pybop.GradientDescent, "Gradient descent", True),
(pybop.Adam, "Adam", True),
(pybop.AdamW, "AdamW", True),
(
pybop.CMAES,
@@ -131,7 +130,6 @@ def test_no_optimisation_parameters(self, model, dataset):
pybop.SciPyMinimize,
pybop.SciPyDifferentialEvolution,
pybop.GradientDescent,
pybop.Adam,
pybop.AdamW,
pybop.SNES,
pybop.XNES,
@@ -184,7 +182,7 @@ def check_multistart(optim, n_iters, multistarts):
multistart_optim = optimiser(cost, multistart=2, max_iterations=6)
check_multistart(multistart_optim, 6, 2)

if optimiser in [pybop.GradientDescent, pybop.Adam, pybop.NelderMead]:
if optimiser in [pybop.GradientDescent, pybop.AdamW, pybop.NelderMead]:
optim = optimiser(cost=cost, bounds=cost_bounds)
assert optim.bounds is None
elif optimiser in [pybop.PSO]: