Fixes for long-term runs (#924)
* An unlimited number of generations is configured for the composer (so that any timeout can be used in full)
* The same applies to the tuner's iterations
* The reason for early stopping is now logged
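For context, a minimal end-to-end sketch of the scenario these changes target: a long run limited only by the wall-clock budget. Dataset and parameter values here are illustrative assumptions, not part of this commit.

```python
# Illustrative only: a long-term run bounded by the timeout alone, relying on
# the new behaviour that the number of generations is not capped by default.
from sklearn.datasets import load_breast_cancer

from fedot.api.main import Fedot

features, target = load_breast_cancer(return_X_y=True)

# timeout is given in minutes; with no generation limit the composer keeps
# evolving pipelines until the timeout or an early-stopping condition fires
model = Fedot(problem='classification', timeout=120)
model.fit(features=features, target=target)
predictions = model.predict(features=features)
```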
nicl-nno authored Oct 13, 2022
1 parent bba8bae commit eb89843
Showing 9 changed files with 20 additions and 15 deletions.
6 changes: 3 additions & 3 deletions fedot/api/api_utils/api_composer.py
@@ -29,7 +29,6 @@
from fedot.core.pipelines.tuning.tuner_builder import TunerBuilder
from fedot.core.pipelines.tuning.unified import PipelineTuner
from fedot.core.pipelines.verification import rules_by_task
from fedot.core.repository.operation_types_repository import get_operations_for_task
from fedot.core.repository.pipeline_operation_repository import PipelineOperationRepository
from fedot.core.repository.quality_metrics_repository import MetricsRepository, MetricType, MetricsEnum
from fedot.core.repository.tasks import Task, TaskTypesEnum
@@ -197,8 +196,9 @@ def compose_fedot_model(self, api_params: dict, composer_params: dict, tuning_pa
assumption_handler.fit_assumption_and_check_correctness(initial_assumption[0],
pipelines_cache=self.pipelines_cache,
preprocessing_cache=self.preprocessing_cache)

self.log.message(
f'Initial pipeline was fitted in {round(self.timer.assumption_fit_spend_time.total_seconds())} sec.')
f'Initial pipeline was fitted in {round(self.timer.assumption_fit_spend_time.total_seconds(), 1)} sec.')

n_jobs = determine_n_jobs(api_params['n_jobs'])
self.preset_name = assumption_handler.propose_preset(preset, self.timer, n_jobs=n_jobs)
@@ -302,8 +302,8 @@ def tune_final_pipeline(self, task: Task,
if self.timer.have_time_for_tuning():
# Tune all nodes in the pipeline
with self.timer.launch_tuning():
self.log.message(f'Hyperparameters tuning started with {round(timeout_for_tuning)} sec. timeout')
self.was_tuned = False
self.log.message(f'Hyperparameters tuning started with {round(timeout_for_tuning)} min. timeout')
tuned_pipeline = tuner.tune(pipeline_gp_composed)
self.was_tuned = True
self.log.message('Hyperparameters tuning finished')
2 changes: 1 addition & 1 deletion fedot/api/api_utils/params.py
@@ -118,7 +118,7 @@ def get_default_evo_params(problem: str):
params = {'max_depth': 6,
'max_arity': 3,
'pop_size': 20,
'num_of_generations': 100,
'num_of_generations': None,
'keep_n_best': 1,
'with_tuning': True,
'preset': AUTO_PRESET_NAME,
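To show the intent behind the `None` default (an illustration, not code from this repository): a missing generation limit means only the timeout or stagnation-based early stopping can end the evolution.

```python
from typing import Optional


def should_stop(generation: int,
                num_of_generations: Optional[int],
                time_limit_reached: bool,
                stagnation_counter: int,
                early_stopping_generations: Optional[int]) -> bool:
    """Illustrative stop check: None is treated as 'no generation cap'."""
    if time_limit_reached:
        return True
    if early_stopping_generations is not None and stagnation_counter >= early_stopping_generations:
        return True
    return num_of_generations is not None and generation >= num_of_generations


# With num_of_generations=None only the first two conditions can fire
assert not should_stop(500, None, False, 3, 10)
```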
2 changes: 1 addition & 1 deletion fedot/core/constants.py
@@ -1,7 +1,7 @@
from fedot.core.repository.tasks import TaskTypesEnum

MINIMAL_SECONDS_FOR_TUNING = 15
DEFAULT_TUNING_ITERATIONS_NUMBER = 1000
DEFAULT_TUNING_ITERATIONS_NUMBER = 100000
MAXIMAL_ATTEMPTS_NUMBER = 1000
EVALUATION_ATTEMPTS_NUMBER = 5
DEFAULT_API_TIMEOUT_MINUTES = 5.0
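Why the iteration cap is raised so high (a schematic, not the tuner's actual loop): with a very large iteration limit, the wall-clock budget becomes the effective stopping criterion, which suits long-term runs.

```python
import time


def run_tuning(evaluate_once, timeout_seconds: float,
               max_iterations: int = 100000) -> int:
    """Toy tuning loop: stops on whichever budget is exhausted first."""
    start = time.monotonic()
    done = 0
    for _ in range(max_iterations):
        if time.monotonic() - start > timeout_seconds:
            break  # with a 100000-iteration cap, this usually fires first
        evaluate_once()
        done += 1
    return done
```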
2 changes: 1 addition & 1 deletion fedot/core/optimisers/composer_requirements.py
@@ -26,7 +26,7 @@ class ComposerRequirements:
:param validation_blocks: number of validation blocks for time series validation
"""

num_of_generations: int = 20
num_of_generations: Optional[int] = None
timeout: Optional[datetime.timedelta] = datetime.timedelta(minutes=5)
early_stopping_generations: Optional[int] = 10

3 changes: 2 additions & 1 deletion fedot/core/optimisers/gp_comp/gp_optimizer.py
@@ -62,7 +62,8 @@ def __init__(self,
def _initial_population(self, evaluator: Callable):
""" Initializes the initial population """
# Adding of initial assumptions to history as zero generation
self._update_population(evaluator(self.initial_individuals))
evaluation_result = evaluator(self.initial_individuals)
self._update_population(evaluation_result)

if len(self.initial_individuals) < self.graph_optimizer_params.pop_size:
self.initial_individuals = self._extend_population(self.initial_individuals)
2 changes: 1 addition & 1 deletion fedot/core/optimisers/populational_optimizer.py
@@ -54,7 +54,7 @@ def __init__(self,
# early_stopping_generations may be None, so use some obvious max number
max_stagnation_length = requirements.early_stopping_generations or requirements.num_of_generations
self.stop_optimization = \
GroupedCondition().add_condition(
GroupedCondition(results_as_message=True).add_condition(
lambda: self.timer.is_time_limit_reached(self.current_generation_num),
'Optimisation stopped: Time limit is reached'
).add_condition(
4 changes: 2 additions & 2 deletions fedot/core/pipelines/tuning/tuner_builder.py
@@ -1,5 +1,5 @@
from datetime import timedelta
from typing import Callable, ClassVar, Type
from typing import Callable, Type, Union

from hyperopt import tpe

@@ -66,7 +66,7 @@ def with_timeout(self, timeout: timedelta):
self.timeout = timeout
return self

def with_eval_time_constraint(self, eval_time_constraint: timedelta):
def with_eval_time_constraint(self, eval_time_constraint: Union[timedelta, int, float]):
self.eval_time_constraint = eval_time_constraint
return self

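What accepting `Union[timedelta, int, float]` usually implies (an assumption about intent, not this module's code): plain numbers get normalized to a `timedelta`. Whether FEDOT reads them as seconds or minutes is not shown in this diff; seconds are assumed below.

```python
from datetime import timedelta
from typing import Union


def as_time_constraint(value: Union[timedelta, int, float]) -> timedelta:
    """Normalize a numeric value (assumed to be seconds) or a timedelta."""
    if isinstance(value, timedelta):
        return value
    return timedelta(seconds=value)


assert as_time_constraint(30) == timedelta(seconds=30)
assert as_time_constraint(timedelta(minutes=1)) == timedelta(minutes=1)
```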
8 changes: 6 additions & 2 deletions fedot/core/utilities/grouped_condition.py
@@ -14,10 +14,11 @@ class GroupedCondition:
if any of the composed conditions is True. The message corresponding
to the actual fired condition is logged (if it was provided)."""

def __init__(self, conditions_reduce: Callable[[Iterable[bool]], bool] = any):
def __init__(self, conditions_reduce: Callable[[Iterable[bool]], bool] = any, results_as_message: bool = False):
self._reduce = conditions_reduce
self._conditions: List[ConditionEntryType] = []
self._log = default_log(self)
self._results_as_message = results_as_message

def add_condition(self, condition: ConditionType, log_msg: Optional[str] = None) -> 'GroupedCondition':
"""Builder-like method for adding conditions."""
@@ -34,5 +35,8 @@ def _check_condition(self, entry: ConditionEntryType) -> bool:
cond, msg = entry
res = cond()
if res and msg:
self._log.info(msg)
if self._results_as_message:
self._log.message(msg)
else:
self._log.info(msg)
return res
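A self-contained stand-in for the pattern added here (it mirrors, rather than imports, `GroupedCondition`): with `results_as_message=True` the fired condition's text goes through a user-facing channel instead of the plain `info` level, which is how the stopping reason surfaces in long runs. The standard `logging` module stands in for FEDOT's logger.

```python
import logging
from typing import Callable, List, Optional, Tuple

logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
log = logging.getLogger('stop_conditions')


class StopConditions:
    """Simplified illustration of GroupedCondition, not the real class."""

    def __init__(self, results_as_message: bool = False):
        self._results_as_message = results_as_message
        self._conditions: List[Tuple[Callable[[], bool], Optional[str]]] = []

    def add_condition(self, condition: Callable[[], bool],
                      log_msg: Optional[str] = None) -> 'StopConditions':
        self._conditions.append((condition, log_msg))
        return self

    def __call__(self) -> bool:
        fired = False
        for condition, msg in self._conditions:
            if not condition():
                continue
            fired = True
            if msg:
                # warning stands in for the user-facing 'message' channel
                (log.warning if self._results_as_message else log.info)(msg)
        return fired


stop = (StopConditions(results_as_message=True)
        .add_condition(lambda: False, 'Optimisation stopped: Time limit is reached')
        .add_condition(lambda: True, 'Optimisation stopped: Early stopping criterion is satisfied'))
print(stop())  # True, and the early-stopping reason is reported to the user
```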
6 changes: 3 additions & 3 deletions requirements.txt
@@ -1,5 +1,5 @@
# Data
numpy>=1.21.*
numpy>=1.16.*
pandas>=1.1.0, <1.3.0; python_version == '3.7'
pandas>=1.3.0; python_version >='3.8'

@@ -13,10 +13,10 @@ ete3>=3.1.*
networkx>=2.4, !=2.7.*, !=2.8.1, !=2.8.2, !=2.8.3
scikit_learn
scikit_learn>=1.0.0; python_version == '3.7'
scikit_learn>=1.1.0; python_version >= '3.8'
scikit_learn>=1.0.0; python_version >= '3.8'

# Analysis and optimizations
hyperopt==0.2.5
hyperopt==0.2.7
SALib>=1.3.*
scikit-optimize>=0.7.4

