Skip to content

Commit

Permalink
Daniel&Dilan review changes
Browse files Browse the repository at this point in the history
  • Loading branch information
Doresic committed Dec 13, 2023
1 parent b2b9527 commit c53407e
Show file tree
Hide file tree
Showing 8 changed files with 77 additions and 44 deletions.
2 changes: 1 addition & 1 deletion doc/example/hierarchical.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -237,7 +237,7 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"Parameters to be optimized in the inner problem are selected via the PEtab parameter table by setting a value in the non-standard column `parameterType` (`offset` for offset parameters, `scaling` for scaling parameters, and `sigma` for sigma parameters). When using hierarchical optimization, the nine overriding parameters {offset,scaling,sd}_{pSTAT5A_rel,pSTAT5B_rel,rSTAT5A_rel} are to estimated in the inner problem."
"Parameters to be optimized in the inner problem are specified via the PEtab parameter table by setting a value in the non-standard column `parameterType` (`offset` for offset parameters, `scaling` for scaling parameters, and `sigma` for sigma parameters). When using hierarchical optimization, the nine overriding parameters {offset,scaling,sd}_{pSTAT5A_rel,pSTAT5B_rel,rSTAT5A_rel} are to be estimated in the inner problem."
]
},
{
Expand Down
1 change: 1 addition & 0 deletions pypesto/hierarchical/calculator.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ def __call__(
inner_result[HESS] = np.full(
shape=(dim, dim), fill_value=np.nan
)
inner_result[INNER_PARAMETERS] = None
return inner_result

inner_parameters = self.inner_solver.solve(
Expand Down
6 changes: 5 additions & 1 deletion pypesto/hierarchical/inner_calculator_collector.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,7 +418,11 @@ def __call__(
# only if the objective value improved.
if ret[FVAL] < self.best_fval:
ret[X_INNER_OPT] = all_inner_pars
ret[INNER_PARAMETERS] = interpretable_inner_pars
ret[INNER_PARAMETERS] = (
interpretable_inner_pars
if len(interpretable_inner_pars) > 0
else None
)
self.best_fval = ret[FVAL]

return filter_return_dict(ret)
Expand Down
2 changes: 1 addition & 1 deletion pypesto/hierarchical/spline_approximation/problem.py
Original file line number Diff line number Diff line change
Expand Up @@ -184,7 +184,7 @@ def get_inner_parameter_dictionary(self) -> Dict:
inner_par_dict[x_id] = x.value
return inner_par_dict

def get_inner_noise_parameters(self) -> Dict:
def get_inner_noise_parameters(self) -> list[float]:
"""Get a list with all noise parameter values."""
return [
x.value for x in self.get_xs_for_type(InnerParameterType.SIGMA)
Expand Down
6 changes: 3 additions & 3 deletions pypesto/objective/amici/amici.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import os
import tempfile
from collections import OrderedDict
from typing import TYPE_CHECKING, Dict, List, Optional, Sequence, Tuple, Union
from typing import TYPE_CHECKING, Dict, Optional, Sequence, Tuple, Union

import numpy as np

Expand Down Expand Up @@ -215,7 +215,7 @@ def __init__(
self.custom_timepoints = None

# Initialize the dictionary for saving of inner parameters.
self.inner_parameters: List[float] = []
self.inner_parameters: list[float] = []

def get_config(self) -> dict:
"""Return basic information of the objective configuration."""
Expand Down Expand Up @@ -456,7 +456,7 @@ def call_unprocessed(

nllh = ret[FVAL]
rdatas = ret[RDATAS]
if INNER_PARAMETERS in ret and any(ret[INNER_PARAMETERS]):
if INNER_PARAMETERS in ret and ret[INNER_PARAMETERS] is not None:
self.inner_parameters = ret[INNER_PARAMETERS]

# check whether we should update data for preequilibration guesses
Expand Down
23 changes: 12 additions & 11 deletions pypesto/optimize/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,13 @@ def __init__(self, optimizer: str):
)


def _add_inner_parameters(
    objective: Objective, optimizer_result: OptimizerResult
):
    """Attach hierarchical inner parameters to an optimizer result.

    Parameters
    ----------
    objective:
        Objective that may carry an ``inner_parameters`` attribute,
        filled during hierarchical optimization.
    optimizer_result:
        Result object that the inner parameters are stored in.
    """
    # Nothing to do for objectives without hierarchical optimization.
    if not hasattr(objective, INNER_PARAMETERS):
        return
    # None signals "no inner parameters were computed"; don't store it.
    if objective.inner_parameters is None:
        return
    optimizer_result[INNER_PARAMETERS] = objective.inner_parameters

Expand Down Expand Up @@ -472,7 +473,7 @@ def fun(x):
exitflag=res.status,
message=res.message,
)
add_inner_parameters(objective, optimizer_result)
_add_inner_parameters(objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -618,7 +619,7 @@ def get_fval_vararg(*x):

optimizer_result = OptimizerResult()

add_inner_parameters(objective, optimizer_result)
_add_inner_parameters(objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -682,7 +683,7 @@ def minimize(

optimizer_result = OptimizerResult(x=np.array(xopt), fval=fopt)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -771,7 +772,7 @@ def minimize(
x=np.array(result[0]), fval=result[1]
)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -842,7 +843,7 @@ def minimize(
x=np.array(result.x), fval=result.fun
)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -963,7 +964,7 @@ def successively_working_fval(swarm: np.ndarray) -> np.ndarray:
fval=float(cost),
)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -1197,7 +1198,7 @@ def nlopt_objective(x, grad):
exitflag=opt.last_optimize_result(),
)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down Expand Up @@ -1385,7 +1386,7 @@ def minimize(
exitflag=opt.exitflag,
)

add_inner_parameters(problem.objective, optimizer_result)
_add_inner_parameters(problem.objective, optimizer_result)

return optimizer_result

Expand Down
79 changes: 53 additions & 26 deletions pypesto/visualize/parameters.py
Original file line number Diff line number Diff line change
Expand Up @@ -376,33 +376,9 @@ def handle_inputs(
# retrieve results
fvals = result.optimize_result.fval
xs = result.optimize_result.x
# retrieve inner parameters if available
inner_xs = [
res.get(INNER_PARAMETERS, None) for res in result.optimize_result.list
]

from ..hierarchical.calculator import HierarchicalAmiciCalculator

if (
any(inner_x is not None for inner_x in inner_xs)
and hasattr(result.problem.objective, 'calculator')
and isinstance(
inner_calculator := result.problem.objective.calculator,
HierarchicalAmiciCalculator,
)
):
inner_xs_names = inner_calculator.inner_problem.get_x_ids()
# replace None with a list of nans
inner_xs = [
[np.nan for i in range(len(inner_xs_names))]
if inner_xs_idx is None
else np.asarray(inner_xs_idx)
for inner_xs_idx in inner_xs
]
# set bounds for inner parameters
inner_lb, inner_ub = inner_calculator.inner_problem.get_bounds()
else:
inner_xs = None
# retrieve inner parameters if available
inner_xs, inner_xs_names, inner_lb, inner_ub = handle_inner_inputs(result)

# parse indices which should be plotted
if start_indices is not None:
Expand Down Expand Up @@ -454,6 +430,57 @@ def handle_inputs(
return lb, ub, x_labels, fvals_out, xs_out


def handle_inner_inputs(
    result: Result,
):
    """Handle inner parameters from hierarchical optimization, if available.

    Parameters
    ----------
    result:
        Optimization result obtained by 'optimize.py'.

    Returns
    -------
    inner_xs:
        Inner parameter values which will be appended to xs, or ``None``
        if inner parameters are not available.
    inner_xs_names:
        Inner parameter names, or ``None``.
    inner_lb:
        Inner parameter lower bounds, or ``None``.
    inner_ub:
        Inner parameter upper bounds, or ``None``.
    """
    inner_xs = [
        res.get(INNER_PARAMETERS, None) for res in result.optimize_result.list
    ]
    # Initialize all outputs up front. Previously these were only assigned
    # inside the hierarchical-calculator branch, so an objective whose
    # calculator is not a HierarchicalAmiciCalculator (while some results
    # still carried inner parameters) raised a NameError at the return.
    inner_xs_names = None
    inner_lb = None
    inner_ub = None

    if any(inner_x is not None for inner_x in inner_xs):
        # Local import to avoid a circular import at module load time.
        from ..hierarchical.calculator import HierarchicalAmiciCalculator

        if hasattr(result.problem.objective, 'calculator') and isinstance(
            inner_calculator := result.problem.objective.calculator,
            HierarchicalAmiciCalculator,
        ):
            inner_xs_names = inner_calculator.inner_problem.get_x_ids()
            # Replace missing entries (None) with NaN rows so all entries
            # have the same length as inner_xs_names.
            inner_xs = [
                [np.nan] * len(inner_xs_names)
                if inner_xs_idx is None
                else np.asarray(inner_xs_idx)
                for inner_xs_idx in inner_xs
            ]
            # Set bounds for inner parameters.
            inner_lb, inner_ub = inner_calculator.inner_problem.get_bounds()

    # Without names (no hierarchical calculator found), the raw per-result
    # values are not plottable; signal "no inner parameters" consistently.
    if inner_xs_names is None:
        inner_xs = None

    return inner_xs, inner_xs_names, inner_lb, inner_ub


def parameters_correlation_matrix(
result: Result,
parameter_indices: Union[str, Sequence[int]] = 'free_only',
Expand Down
2 changes: 1 addition & 1 deletion test/visualize/test_visualize.py
Original file line number Diff line number Diff line change
Expand Up @@ -459,7 +459,7 @@ def test_parameters_hist():
@pytest.mark.parametrize("scale_to_interval", [None, (0, 1)])
@close_fig
def test_parameters_hierarchical(scale_to_interval):
# bbtain a petab problem with hierarchical parameters
# obtain a petab problem with hierarchical parameters
petab_problem = (
get_Boehm_JProteomeRes2014_hierarchical_petab_corrected_bounds()
)
Expand Down

0 comments on commit c53407e

Please sign in to comment.