diff --git a/CHANGELOG.md b/CHANGELOG.md index 2df79cce2..9e26270dc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,7 @@ ## Features -- [#236](https://github.com/pybop-team/PyBOP/issues/236) - Restructures the optimiser classes to allow the passing of keyword arguments and fixes the setting of max_iterations and minimising. +- [#236](https://github.com/pybop-team/PyBOP/issues/236) - Restructures the optimiser classes, adds a new optimisation API through direct construction and keyword arguments, and fixes the setting of `max_iterations` and `_minimising`. Introduces `pybop.BaseOptimiser`, `pybop.BasePintsOptimiser`, and `pybop.BaseSciPyOptimiser` classes. - [#321](https://github.com/pybop-team/PyBOP/pull/321) - Updates Prior classes with BaseClass, adds a `problem.sample_initial_conditions` method to improve stability of SciPy.Minimize optimiser. - [#249](https://github.com/pybop-team/PyBOP/pull/249) - Add WeppnerHuggins model and GITT example. - [#304](https://github.com/pybop-team/PyBOP/pull/304) - Decreases the testing suite completion time. 
diff --git a/examples/notebooks/optimiser_interface.ipynb b/examples/notebooks/optimiser_interface.ipynb new file mode 100644 index 000000000..bbd763132 --- /dev/null +++ b/examples/notebooks/optimiser_interface.ipynb @@ -0,0 +1,285 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "00940c64-4748-4b08-9a35-ea98ce311e71", + "metadata": {}, + "source": [ + "# Interacting with PyBOP optimisers\n", + "\n", + "This notebook introduces two interfaces to interact with PyBOP's optimiser classes.\n", + "\n", + "### Set the Environment" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "id": "dd0e1a20-1ba3-4ff5-8f6a-f9c6f25c2a4a", + "metadata": { + "execution": { + "iopub.execute_input": "2024-04-14T18:57:35.622147Z", + "iopub.status.busy": "2024-04-14T18:57:35.621660Z", + "iopub.status.idle": "2024-04-14T18:57:40.849137Z", + "shell.execute_reply": "2024-04-14T18:57:40.848620Z" + } + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Requirement already satisfied: pip in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (24.0)\n", + "Requirement already satisfied: ipywidgets in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (8.1.2)\n", + "Requirement already satisfied: comm>=0.1.3 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipywidgets) (0.2.2)\n", + "Requirement already satisfied: ipython>=6.1.0 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipywidgets) (8.23.0)\n", + "Requirement already satisfied: traitlets>=4.3.1 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipywidgets) (5.14.2)\n", + "Requirement already satisfied: widgetsnbextension~=4.0.10 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipywidgets) (4.0.10)\n", + "Requirement already satisfied: 
jupyterlab-widgets~=3.0.10 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipywidgets) (3.0.10)\n", + "Requirement already satisfied: decorator in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (5.1.1)\n", + "Requirement already satisfied: jedi>=0.16 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (0.19.1)\n", + "Requirement already satisfied: matplotlib-inline in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (0.1.6)\n", + "Requirement already satisfied: prompt-toolkit<3.1.0,>=3.0.41 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (3.0.43)\n", + "Requirement already satisfied: pygments>=2.4.0 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (2.17.2)\n", + "Requirement already satisfied: stack-data in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (0.6.3)\n", + "Requirement already satisfied: pexpect>4.3 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from ipython>=6.1.0->ipywidgets) (4.9.0)\n", + "Requirement already satisfied: parso<0.9.0,>=0.8.3 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets) (0.8.4)\n", + "Requirement already satisfied: ptyprocess>=0.5 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from pexpect>4.3->ipython>=6.1.0->ipywidgets) (0.7.0)\n", + "Requirement already satisfied: wcwidth in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from 
prompt-toolkit<3.1.0,>=3.0.41->ipython>=6.1.0->ipywidgets) (0.2.13)\n", + "Requirement already satisfied: executing>=1.2.0 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from stack-data->ipython>=6.1.0->ipywidgets) (2.0.1)\n", + "Requirement already satisfied: asttokens>=2.1.0 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from stack-data->ipython>=6.1.0->ipywidgets) (2.4.1)\n", + "Requirement already satisfied: pure-eval in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from stack-data->ipython>=6.1.0->ipywidgets) (0.2.2)\n", + "Requirement already satisfied: six>=1.12.0 in /Users/engs2510/.pyenv/versions/3.12.2/envs/pybop-3.12/lib/python3.12/site-packages (from asttokens>=2.1.0->stack-data->ipython>=6.1.0->ipywidgets) (1.16.0)\n", + "Note: you may need to restart the kernel to use updated packages.\n", + "Note: you may need to restart the kernel to use updated packages.\n" + ] + } + ], + "source": [ + "%pip install --upgrade pip ipywidgets\n", + "%pip install pybop -q\n", + "\n", + "# Import the necessary libraries\n", + "import numpy as np\n", + "\n", + "import pybop" + ] + }, + { + "cell_type": "markdown", + "id": "017695fd-ee78-4113-af18-2fea04cf6126", + "metadata": {}, + "source": [ + "## Setup the model, problem, and cost\n", + "\n", + "The code block below sets up the model, problem, and cost objects. For more information on this process, take a look at other notebooks in the examples directory." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "id": "c346b106-99a9-46bc-8b5d-d330ed911660", + "metadata": { + "execution": { + "iopub.execute_input": "2024-04-14T18:57:46.438835Z", + "iopub.status.busy": "2024-04-14T18:57:46.438684Z", + "iopub.status.idle": "2024-04-14T18:57:46.478613Z", + "shell.execute_reply": "2024-04-14T18:57:46.478339Z" + } + }, + "outputs": [], + "source": [ + "# Load the parameters\n", + "parameter_set = pybop.ParameterSet(\n", + " json_path=\"../scripts/parameters/initial_ecm_parameters.json\"\n", + ")\n", + "parameter_set.import_parameters()\n", + "# Define the model\n", + "model = pybop.empirical.Thevenin(\n", + " parameter_set=parameter_set, options={\"number of rc elements\": 1}\n", + ")\n", + "\n", + "# Define the parameters\n", + "parameters = [\n", + " pybop.Parameter(\n", + " \"R0 [Ohm]\",\n", + " prior=pybop.Gaussian(0.0002, 0.0001),\n", + " bounds=[1e-4, 1e-2],\n", + " )\n", + "]\n", + "\n", + "# Generate synthetic data\n", + "t_eval = np.arange(0, 900, 2)\n", + "values = model.predict(t_eval=t_eval)\n", + "\n", + "# Form dataset\n", + "dataset = pybop.Dataset(\n", + " {\n", + " \"Time [s]\": t_eval,\n", + " \"Current function [A]\": values[\"Current [A]\"].data,\n", + " \"Voltage [V]\": values[\"Voltage [V]\"].data,\n", + " }\n", + ")\n", + "\n", + "# Construct problem and cost\n", + "problem = pybop.FittingProblem(model, parameters, dataset)\n", + "cost = pybop.SumSquaredError(problem)" + ] + }, + { + "cell_type": "markdown", + "id": "3ef5b0da-f755-43c6-8904-79d7ee0f218c", + "metadata": {}, + "source": [ + "## Interacting with the Optimisers\n", + "\n", + "Now that we have setup the required objects, we can introduce the two interfaces fo interacting with PyBOP optimisers. These are:\n", + " \n", + "1. The direct optimiser (i.e. `pybop.XNES`)\n", + "2. The optimisation class (i.e. `pybop.Optimisation`)\n", + " \n", + "These two methods provide two equivalent ways of interacting with PyBOP's optimisers. 
The first method provides a direct way to select the Optimiser, with the second method being more general method with a default optimiser (`pybop.XNES`) set if you don't provide an optimiser. \n", + "\n", + "First, the direct interface is presented. With this interface the user can select from the [list of optimisers](https://github.com/pybop-team/PyBOP?tab=readme-ov-file#supported-methods) supported in PyBOP and construct them directly. Options can be passed as kwargs, or through get() / set() methods in the case of Pints' based optimisers." + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "6244882e-11ad-4bfe-a512-f1c687a06a08", + "metadata": { + "execution": { + "iopub.execute_input": "2024-04-14T18:57:46.512725Z", + "iopub.status.busy": "2024-04-14T18:57:46.512597Z", + "iopub.status.idle": "2024-04-14T18:57:49.259154Z", + "shell.execute_reply": "2024-04-14T18:57:49.257712Z" + } + }, + "outputs": [], + "source": [ + "optim_one = pybop.XNES(\n", + " cost, max_iterations=50\n", + ") # Direct optimiser class with options as kwargs\n", + "optim_one.set_max_iterations(\n", + " 50\n", + ") # Alternatively, set() / get() methods for Pints' optimisers\n", + "x1, final_cost = optim_one.run()" + ] + }, + { + "cell_type": "markdown", + "id": "c62e23f7", + "metadata": {}, + "source": [ + "Next, the `Optimisation` interface is less direct than the previous one, but provides a single class to work with across PyBOP workflows. The options are passed the same way as the above method, through kwargs or get() / set() methods." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 4, + "id": "479fc846", + "metadata": {}, + "outputs": [], + "source": [ + "optim_two = pybop.Optimisation(\n", + " cost, optimiser=pybop.XNES, max_iterations=50\n", + ") # Optimisation class with options as kwargs\n", + "optim_two.set_max_iterations(\n", + " 50\n", + ") # Alternatively, set() / get() methods for Pints' optimisers\n", + "x2, final_cost = optim_two.run()" + ] + }, + { + "cell_type": "markdown", + "id": "5c6ea9fd", + "metadata": {}, + "source": [ + "We can show the equivalence of these two methods by comparing the optimiser objects:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "id": "de56587e", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "isinstance(optim_one, type(optim_two.optimiser))" + ] + }, + { + "cell_type": "markdown", + "id": "9f6634c0", + "metadata": {}, + "source": [ + "For completeness, we can show the optimiser solutions:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "66b74f3e", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Estimated parameters x1: [0.00099965]\n", + "Estimated parameters x2: [0.00099985]\n" + ] + } + ], + "source": [ + "print(\"Estimated parameters x1:\", x1)\n", + "print(\"Estimated parameters x2:\", x2)" + ] + }, + { + "cell_type": "markdown", + "id": "94653584", + "metadata": {}, + "source": [ + "## Closing Comments\n", + "\n", + "As both of these API's provide access to the same optimisers, please use either as you prefer. A couple things to note:\n", + "\n", + "- If you are using a SciPy-based optimiser (`pybop.SciPyMinimize`, `pybop.SciPyDifferentialEvolution`), the `set()` / `get()` methods for the optimiser options are not currently supported. 
These optimisers required options to be passed as kwargs.\n", + "- The optimiser passed to `pybop.Optimisation` must not be a constructed object." + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.2" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/examples/scripts/spm_pso.py b/examples/scripts/spm_pso.py index ddfa72bb0..4b99bd129 100644 --- a/examples/scripts/spm_pso.py +++ b/examples/scripts/spm_pso.py @@ -37,7 +37,7 @@ # Generate problem, cost function, and optimisation class problem = pybop.FittingProblem(model, parameters, dataset) cost = pybop.SumSquaredError(problem) -optim = pybop.PSO(cost, max_iterations=100) +optim = pybop.Optimisation(cost, optimiser=pybop.PSO, max_iterations=100) x, final_cost = optim.run() print("Estimated parameters:", x) diff --git a/examples/standalone/optimiser.py b/examples/standalone/optimiser.py index 89414e10b..eb16fe555 100644 --- a/examples/standalone/optimiser.py +++ b/examples/standalone/optimiser.py @@ -1,10 +1,10 @@ import numpy as np from scipy.optimize import minimize -from pybop import Optimisation +from pybop import BaseOptimiser -class StandaloneOptimiser(Optimisation): +class StandaloneOptimiser(BaseOptimiser): """ Defines an example standalone optimiser without a Cost. 
""" diff --git a/pybop/__init__.py b/pybop/__init__.py index 362e068ca..4e1b772fa 100644 --- a/pybop/__init__.py +++ b/pybop/__init__.py @@ -95,7 +95,7 @@ # # Optimiser class # -from ._optimisation import Optimisation +from .optimisers._optimisation import BaseOptimiser from .optimisers.base_optimiser import BasePintsOptimiser from .optimisers.scipy_optimisers import ( BaseSciPyOptimiser, @@ -103,7 +103,6 @@ SciPyDifferentialEvolution ) from .optimisers.pints_optimisers import ( - DefaultOptimiser, GradientDescent, Adam, CMAES, @@ -113,6 +112,7 @@ SNES, XNES, ) +from .optimisers.optimisation import Optimisation # # Parameter classes diff --git a/pybop/_optimisation.py b/pybop/optimisers/_optimisation.py similarity index 99% rename from pybop/_optimisation.py rename to pybop/optimisers/_optimisation.py index 1f57e96c6..713df4d49 100644 --- a/pybop/_optimisation.py +++ b/pybop/optimisers/_optimisation.py @@ -5,7 +5,7 @@ from pybop import BaseCost, BaseLikelihood, DesignCost -class Optimisation: +class BaseOptimiser: """ A base class for defining optimisation methods. diff --git a/pybop/optimisers/base_optimiser.py b/pybop/optimisers/base_optimiser.py index 0c3b77995..17d932722 100644 --- a/pybop/optimisers/base_optimiser.py +++ b/pybop/optimisers/base_optimiser.py @@ -1,10 +1,10 @@ import numpy as np import pints -from pybop import Optimisation +from pybop import BaseOptimiser -class BasePintsOptimiser(Optimisation): +class BasePintsOptimiser(BaseOptimiser): """ A base class for defining optimisation methods from the PINTS library. diff --git a/pybop/optimisers/optimisation.py b/pybop/optimisers/optimisation.py new file mode 100644 index 000000000..aaa0ab3bf --- /dev/null +++ b/pybop/optimisers/optimisation.py @@ -0,0 +1,65 @@ +from pybop import XNES, BasePintsOptimiser, BaseSciPyOptimiser + + +class Optimisation: + """ + A high-level class for optimisation using PyBOP or PINTS optimisers. 
+ + This class provides an alternative API to the `pybop.BaseOptimiser()` API, + specifically allowing for a single user-friendly interface for the + optimisation process. The class can be used with either PyBOP or PINTS + optimisers. + + Parameters + ---------- + cost : pybop.BaseCost or pints.ErrorMeasure + An objective function to be optimized, which can be either a pybop.Cost or a pints.ErrorMeasure. + optimiser : pybop.Optimiser or subclass of pybop.BaseOptimiser, optional + An optimiser from either the PINTS or PyBOP framework to perform the optimization (default: None). + sigma0 : float or sequence, optional + Initial step size or standard deviation for the optimiser (default: None). + verbose : bool, optional + If True, the optimization progress is printed (default: False). + physical_viability : bool, optional + If True, the feasibility of the optimised parameters is checked (default: True). + allow_infeasible_solutions : bool, optional + If True, infeasible parameter values will be allowed in the optimisation (default: True). 
+ + Attributes + ---------- + All attributes from the pybop.optimiser() class + + """ + + def __init__(self, cost, optimiser=None, **optimiser_kwargs): + self.__dict__["optimiser"] = ( + None # Pre-define optimiser to avoid recursion during initialisation + ) + if optimiser is None: + self.optimiser = XNES(cost, **optimiser_kwargs) + elif issubclass(optimiser, BasePintsOptimiser): + self.optimiser = optimiser(cost, **optimiser_kwargs) + elif issubclass(optimiser, BaseSciPyOptimiser): + self.optimiser = optimiser(cost, **optimiser_kwargs) + else: + raise ValueError("Unknown optimiser type") + + def run(self): + return self.optimiser.run() + + def __getattr__(self, attr): + if "optimiser" in self.__dict__ and hasattr(self.optimiser, attr): + return getattr(self.optimiser, attr) + raise AttributeError( + f"'{self.__class__.__name__}' object has no attribute '{attr}'" + ) + + def __setattr__(self, name: str, value) -> None: + if ( + name in self.__dict__ + or "optimiser" not in self.__dict__ + or not hasattr(self.optimiser, name) + ): + object.__setattr__(self, name, value) + else: + setattr(self.optimiser, name, value) diff --git a/pybop/optimisers/pints_optimisers.py b/pybop/optimisers/pints_optimisers.py index da0390d57..e3d8ee31b 100644 --- a/pybop/optimisers/pints_optimisers.py +++ b/pybop/optimisers/pints_optimisers.py @@ -233,10 +233,3 @@ def __init__(self, cost, **optimiser_kwargs): + "Please choose another optimiser." ) super().__init__(cost, pints.CMAES, **optimiser_kwargs) - - -class DefaultOptimiser(XNES): - """ - Provides a default option for new users, selected to be the Exponential Natural - Evolution Strategy (XNES) optimiser from PINTS. 
- """ diff --git a/pybop/optimisers/scipy_optimisers.py b/pybop/optimisers/scipy_optimisers.py index 16a8e898a..6c2498093 100644 --- a/pybop/optimisers/scipy_optimisers.py +++ b/pybop/optimisers/scipy_optimisers.py @@ -1,10 +1,10 @@ import numpy as np from scipy.optimize import differential_evolution, minimize -from pybop import Optimisation +from pybop import BaseOptimiser -class BaseSciPyOptimiser(Optimisation): +class BaseSciPyOptimiser(BaseOptimiser): """ A base class for defining optimisation methods from the SciPy library. diff --git a/pybop/plotting/plot2d.py b/pybop/plotting/plot2d.py index 0a6c2ab02..957279613 100644 --- a/pybop/plotting/plot2d.py +++ b/pybop/plotting/plot2d.py @@ -45,7 +45,7 @@ def plot2d( """ # Assign input as a cost or optimisation object - if isinstance(cost_or_optim, pybop.Optimisation): + if isinstance(cost_or_optim, (pybop.BaseOptimiser, pybop.Optimisation)): optim = cost_or_optim plot_optim = True cost = optim.cost diff --git a/tests/integration/test_optimisation_options.py b/tests/integration/test_optimisation_options.py index 92020af20..1505a37dd 100644 --- a/tests/integration/test_optimisation_options.py +++ b/tests/integration/test_optimisation_options.py @@ -80,8 +80,9 @@ def spm_costs(self, model, parameters, cost_class): ) @pytest.mark.integration def test_optimisation_f_guessed(self, f_guessed, spm_costs): + x0 = spm_costs.x0 # Test each optimiser - parameterisation = pybop.XNES( + optim = pybop.XNES( cost=spm_costs, sigma0=0.05, max_iterations=125, @@ -92,16 +93,17 @@ def test_optimisation_f_guessed(self, f_guessed, spm_costs): # Set parallelisation if not on Windows if sys.platform != "win32": - parameterisation.set_parallel(True) + optim.set_parallel(True) - initial_cost = parameterisation.cost(spm_costs.x0) - x, final_cost = parameterisation.run() + initial_cost = optim.cost(x0) + x, final_cost = optim.run() # Assertions - if parameterisation.minimising: - assert initial_cost > final_cost - else: - assert initial_cost < 
final_cost + if not np.allclose(x0, self.ground_truth, atol=1e-5): + if optim.minimising: + assert initial_cost > final_cost + else: + assert initial_cost < final_cost np.testing.assert_allclose(x, self.ground_truth, atol=2.5e-2) def getdata(self, model, x, init_soc): diff --git a/tests/integration/test_spm_parameterisations.py b/tests/integration/test_spm_parameterisations.py index 8400c7f29..eada8d9b9 100644 --- a/tests/integration/test_spm_parameterisations.py +++ b/tests/integration/test_spm_parameterisations.py @@ -110,16 +110,21 @@ def test_spm_optimisers(self, optimiser, spm_costs): spm_costs.bounds = bounds # Test each optimiser - parameterisation = optimiser(cost=spm_costs, sigma0=0.05, max_iterations=125) + if optimiser in [pybop.PSO]: + optim = pybop.Optimisation( + cost=spm_costs, optimiser=optimiser, sigma0=0.05, max_iterations=125 + ) + else: + optim = optimiser(cost=spm_costs, sigma0=0.05, max_iterations=125) if issubclass(optimiser, pybop.BasePintsOptimiser): - parameterisation.set_max_unchanged_iterations(iterations=35, threshold=1e-5) + optim.set_max_unchanged_iterations(iterations=35, threshold=1e-5) - initial_cost = parameterisation.cost(x0) - x, final_cost = parameterisation.run() + initial_cost = optim.cost(x0) + x, final_cost = optim.run() # Assertions if not np.allclose(x0, self.ground_truth, atol=1e-5): - if parameterisation.minimising: + if optim.minimising: assert initial_cost > final_cost else: assert initial_cost < final_cost @@ -183,18 +188,18 @@ def test_multiple_signals(self, multi_optimiser, spm_two_signal_cost): spm_two_signal_cost.bounds = bounds # Test each optimiser - parameterisation = multi_optimiser( + optim = multi_optimiser( cost=spm_two_signal_cost, sigma0=0.03, max_iterations=125 ) if issubclass(multi_optimiser, pybop.BasePintsOptimiser): - parameterisation.set_max_unchanged_iterations(iterations=35, threshold=5e-4) + optim.set_max_unchanged_iterations(iterations=35, threshold=5e-4) - initial_cost = 
parameterisation.cost(spm_two_signal_cost.x0) - x, final_cost = parameterisation.run() + initial_cost = optim.cost(spm_two_signal_cost.x0) + x, final_cost = optim.run() # Assertions if not np.allclose(x0, self.ground_truth, atol=1e-5): - if parameterisation.minimising: + if optim.minimising: assert initial_cost > final_cost else: assert initial_cost < final_cost @@ -231,9 +236,12 @@ def test_model_misparameterisation(self, parameters, model, init_soc): # Run the optimisation problem x, final_cost = parameterisation.run() - # Assertions + # Assertion for final_cost with np.testing.assert_raises(AssertionError): np.testing.assert_allclose(final_cost, 0, atol=1e-2) + + # Assertion for x + with np.testing.assert_raises(AssertionError): np.testing.assert_allclose(x, self.ground_truth, atol=2e-2) def getdata(self, model, x, init_soc): diff --git a/tests/integration/test_thevenin_parameterisation.py b/tests/integration/test_thevenin_parameterisation.py index 3ef6b53bb..5ac6e84ef 100644 --- a/tests/integration/test_thevenin_parameterisation.py +++ b/tests/integration/test_thevenin_parameterisation.py @@ -70,26 +70,26 @@ def cost(self, model, parameters, cost_class): def test_optimisers_on_simple_model(self, optimiser, cost): x0 = cost.x0 if optimiser in [pybop.GradientDescent]: - parameterisation = optimiser( + optim = optimiser( cost=cost, sigma0=2.5e-4, max_iterations=250, ) else: - parameterisation = optimiser( + optim = optimiser( cost=cost, sigma0=0.03, max_iterations=250, ) if isinstance(optimiser, pybop.BasePintsOptimiser): - parameterisation.set_max_unchanged_iterations(iterations=55, threshold=1e-5) + optim.set_max_unchanged_iterations(iterations=55, threshold=1e-5) - initial_cost = parameterisation.cost(x0) - x, final_cost = parameterisation.run() + initial_cost = optim.cost(x0) + x, final_cost = optim.run() # Assertions if not np.allclose(x0, self.ground_truth, atol=1e-5): - if parameterisation.minimising: + if optim.minimising: assert initial_cost > final_cost 
else: assert initial_cost < final_cost diff --git a/tests/unit/test_optimisation.py b/tests/unit/test_optimisation.py index efdb0db6a..2bfce2ba2 100644 --- a/tests/unit/test_optimisation.py +++ b/tests/unit/test_optimisation.py @@ -90,6 +90,12 @@ def test_optimiser_classes(self, two_param_cost, optimiser, expected_name): assert optim.cost is not None assert optim.name() == expected_name + # Test pybop.Optimisation construction + optim = pybop.Optimisation(cost=cost, optimiser=optimiser) + + assert optim.cost is not None + assert optim.name() == expected_name + if optimiser not in [pybop.SciPyDifferentialEvolution]: # Test construction without bounds optim = optimiser(cost=cost, bounds=None) @@ -235,7 +241,7 @@ def test_invalid_cost(self): Exception, match="The cost is not a recognised cost object or function.", ): - pybop.DefaultOptimiser(cost="Invalid string") + pybop.Optimisation(cost="Invalid string") def invalid_cost(x): return [1, 2] @@ -244,13 +250,20 @@ def invalid_cost(x): Exception, match="not a scalar numeric value.", ): - pybop.DefaultOptimiser(cost=invalid_cost) + pybop.Optimisation(cost=invalid_cost) @pytest.mark.unit def test_default_optimiser(self, cost): - optim = pybop.DefaultOptimiser(cost=cost) + optim = pybop.Optimisation(cost=cost) assert optim.name() == "Exponential Natural Evolution Strategy (xNES)" + # Test incorrect setting attribute + with pytest.raises( + AttributeError, + match="'Optimisation' object has no attribute 'not_a_valid_attribute'", + ): + optim.not_a_valid_attribute + @pytest.mark.unit def test_incorrect_optimiser_class(self, cost): class RandomClass: @@ -263,13 +276,16 @@ class RandomClass: pybop.BasePintsOptimiser(cost=cost, pints_optimiser=RandomClass) with pytest.raises(NotImplementedError): - pybop.Optimisation(cost=cost) + pybop.BaseOptimiser(cost=cost) + + with pytest.raises(ValueError): + pybop.Optimisation(cost=cost, optimiser=RandomClass) @pytest.mark.unit def test_prior_sampling(self, cost): # Tests prior sampling 
for i in range(50): - optim = pybop.DefaultOptimiser(cost=cost) + optim = pybop.Optimisation(cost=cost) assert optim.x0 <= 0.62 and optim.x0 >= 0.58 @@ -342,7 +358,7 @@ def test_halting(self, cost): with pytest.raises(ValueError): optim.set_max_unchanged_iterations(1, threshold=-1) - optim = pybop.DefaultOptimiser(cost=cost) + optim = pybop.Optimisation(cost=cost) # Trigger threshold optim._threshold = np.inf @@ -387,5 +403,5 @@ def test_infeasible_solutions(self, cost): @pytest.mark.unit def test_unphysical_result(self, cost): # Trigger parameters not physically viable warning - optim = pybop.DefaultOptimiser(cost=cost) + optim = pybop.Optimisation(cost=cost) optim.check_optimal_parameters(np.array([2])) diff --git a/tests/unit/test_plots.py b/tests/unit/test_plots.py index b0dd9ad36..f5998c0af 100644 --- a/tests/unit/test_plots.py +++ b/tests/unit/test_plots.py @@ -108,7 +108,7 @@ def test_cost_plots(self, cost): @pytest.fixture def optim(self, cost): # Define and run an example optimisation - optim = pybop.DefaultOptimiser(cost) + optim = pybop.Optimisation(cost) optim.run() return optim diff --git a/tests/unit/test_standalone.py b/tests/unit/test_standalone.py index 8a2c22bfd..d524555a5 100644 --- a/tests/unit/test_standalone.py +++ b/tests/unit/test_standalone.py @@ -29,14 +29,15 @@ def test_standalone_optimiser(self): np.testing.assert_allclose(x, [2, 4], atol=1e-2) @pytest.mark.unit - def test_standalone_cost(self): + def test_optimisation_on_standalone_cost(self): # Build an Optimisation problem with a StandaloneCost cost = StandaloneCost() optim = pybop.SciPyDifferentialEvolution(cost=cost) x, final_cost = optim.run() - np.testing.assert_allclose(x, 0, atol=1e-2) - np.testing.assert_allclose(final_cost, 42, atol=1e-2) + initial_cost = optim.cost(cost.x0) + assert initial_cost > final_cost + np.testing.assert_allclose(final_cost, 42, atol=1e-1) @pytest.mark.unit def test_standalone_problem(self):