[Feature] Support HyperBand and BOHB scheduler (#101)
* Bump ray from 1.9.1 to 2.1.0

Signed-off-by: Junhwa Song <[email protected]>
KKIEEK authored Dec 19, 2022
1 parent 54dc8f1 commit 835efa1
Showing 17 changed files with 57 additions and 63 deletions.
9 changes: 5 additions & 4 deletions README.md
@@ -28,11 +28,12 @@ SIATune is an open-source deep learning model hyperparameter tuning toolbox espe
- **Schedule multiple experiments**

Various scheduling techniques are supported to efficiently manage many experiments.
-  - [x] [AsyncHyperBandScheduler](https://arxiv.org/abs/1810.05934)
-  - [ ] [PopulationBasedTraining](https://www.deepmind.com/blog/population-based-training-of-neural-networks)
-  - [ ] [MedianStoppingRule](https://research.google.com/pubs/pub46180.html)
+  - [x] [Asynchronous HyperBand](https://arxiv.org/abs/1810.05934)
+  - [x] [HyperBand](https://arxiv.org/abs/1603.06560)
+  - [ ] [Median Stopping Rule](https://research.google.com/pubs/pub46180.html)
+  - [ ] [Population Based Training](https://www.deepmind.com/blog/population-based-training-of-neural-networks)
  - [ ] [Population Based Bandits](https://arxiv.org/abs/2002.02518)
-  - [ ] [HyperBandScheduler](https://arxiv.org/abs/1603.06560)
+  - [x] [Bayesian Optimization and HyperBand](https://arxiv.org/abs/1807.01774)


- **Distributed tuning system based on Ray**
2 changes: 2 additions & 0 deletions configs/_base_/scheduler/bohb.py
@@ -0,0 +1,2 @@
+trial_scheduler = dict(
+    type='HyperBandForBOHB', time_attr='training_iteration', max_t=20)
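
For context, a minimal sketch (not part of this commit) of how the new BOHB pieces combine: Ray's HyperBandForBOHB scheduler is designed to be driven by the TuneBOHB searcher (added in configs/_base_/searcher/bohb.py below). The trainable and search space here are illustrative placeholders.

from ray import tune

from siatune.ray.schedulers import build_scheduler
from siatune.ray.searchers import build_searcher

# Build the scheduler/searcher pair from configs shaped like the new base files.
scheduler = build_scheduler(
    dict(type='HyperBandForBOHB', time_attr='training_iteration', max_t=20))
searcher = build_searcher(dict(type='TuneBOHB'))


def trainable(config):  # placeholder objective
    tune.report(mean_loss=(config['x'] - 1)**2)


tune.run(
    trainable,
    metric='mean_loss',
    mode='min',
    scheduler=scheduler,
    search_alg=searcher,
    num_samples=4,
    config={'x': tune.uniform(-2.0, 2.0)})
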
2 changes: 2 additions & 0 deletions configs/_base_/scheduler/hb.py
@@ -0,0 +1,2 @@
+trial_scheduler = dict(
+    type='HyperBandScheduler', time_attr='training_iteration', max_t=20)
1 change: 1 addition & 0 deletions configs/_base_/searcher/bohb.py
@@ -0,0 +1 @@
+searcher = dict(type='TuneBOHB')
5 changes: 1 addition & 4 deletions configs/_base_/searcher/nevergrad_oneplusone.py
@@ -1,4 +1 @@
-searcher = dict(
-    type='NevergradSearch',
-    budget=256,
-)
+searcher = dict(type='NevergradSearch', budget=256)
6 changes: 1 addition & 5 deletions configs/_base_/searcher/nevergrad_pso.py
@@ -1,5 +1 @@
-searcher = dict(
-    type='NevergradSearch',
-    optimizer='PSO',
-    budget=256,
-)
+searcher = dict(type='NevergradSearch', optimizer='PSO', budget=256)
2 changes: 2 additions & 0 deletions requirements/optional.txt
@@ -1,5 +1,7 @@
bayesian-optimization==1.2.0
+ConfigSpace
flaml==1.0.14
+hpbandster
hyperopt==0.2.5
mlflow==1.23.1
nevergrad==0.4.3.post7
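
Note: ConfigSpace and hpbandster are the two libraries backing Ray's TuneBOHB searcher (Ray's docs ask for pip install hpbandster ConfigSpace), which is why they join the optional requirements alongside this commit's BOHB support.
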
4 changes: 2 additions & 2 deletions siatune/ray/schedulers/__init__.py
@@ -1,5 +1,5 @@
# Copyright (c) SI-Analytics. All rights reserved.
-from .builder import SCHEDULERS, build_scheduler
+from .builder import TRIAL_SCHEDULERS, build_scheduler
from .pbt import PopulationBasedTraining

-__all__ = ['SCHEDULERS', 'build_scheduler', 'PopulationBasedTraining']
+__all__ = ['TRIAL_SCHEDULERS', 'build_scheduler', 'PopulationBasedTraining']
9 changes: 5 additions & 4 deletions siatune/ray/schedulers/builder.py
@@ -3,15 +3,16 @@

from mmcv.utils import Config, Registry
from ray import tune
+from ray.tune.schedulers import TrialScheduler

-SCHEDULERS = Registry('schedulers')
+TRIAL_SCHEDULERS = Registry('trial scheduler')
for v in set(tune.schedulers.SCHEDULER_IMPORT.values()):
    if not inspect.isclass(v):
        continue
-    SCHEDULERS.register_module(module=v)
+    TRIAL_SCHEDULERS.register_module(module=v)


-def build_scheduler(cfg: Config) -> tune.schedulers.TrialScheduler:
+def build_scheduler(cfg: Config) -> TrialScheduler:
"""Build the scheduler from configs.
Args:
@@ -20,4 +21,4 @@ def build_scheduler(cfg: Config) -> tune.schedulers.TrialScheduler:
        tune.schedulers.TrialScheduler: The scheduler.
    """

-    return SCHEDULERS.build(cfg)
+    return TRIAL_SCHEDULERS.build(cfg)
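
As a hedged illustration of the renamed registry (assuming Ray 2.1's built-in scheduler set): every class reachable through tune.schedulers.SCHEDULER_IMPORT is registered under its class name, so any of them can be built from a plain config.

from siatune.ray.schedulers import TRIAL_SCHEDULERS, build_scheduler

# Ray's trial schedulers are registered under their class names,
# e.g. 'AsyncHyperBandScheduler', 'HyperBandScheduler', 'HyperBandForBOHB'.
scheduler = build_scheduler(dict(type='AsyncHyperBandScheduler', max_t=20))
assert type(scheduler).__name__ == 'AsyncHyperBandScheduler'
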
4 changes: 2 additions & 2 deletions siatune/ray/schedulers/pbt.py
@@ -8,7 +8,7 @@
    PopulationBasedTraining as _PopulationBasedTraining
from ray.tune.search.sample import Domain

-from siatune.ray.schedulers import SCHEDULERS
+from siatune.ray.schedulers import TRIAL_SCHEDULERS
from siatune.ray.spaces import build_space
from siatune.utils import ImmutableContainer

@@ -50,7 +50,7 @@ def explore(
    return new_config


-@SCHEDULERS.register_module(force=True)
+@TRIAL_SCHEDULERS.register_module(force=True)
class PopulationBasedTraining(_PopulationBasedTraining):

    def __init__(self, *args, **kwargs) -> None:
7 changes: 1 addition & 6 deletions siatune/ray/searchers/__init__.py
@@ -1,10 +1,5 @@
# Copyright (c) SI-Analytics. All rights reserved.
from .builder import SEARCHERS, build_searcher
-from .flaml import BlendSearch, CFOSearch
-from .hyperopt import HyperOptSearch
from .nevergrad import NevergradSearch

-__all__ = [
-    'SEARCHERS', 'build_searcher', 'BlendSearch', 'CFOSearch',
-    'HyperOptSearch', 'NevergradSearch'
-]
+__all__ = ['SEARCHERS', 'build_searcher', 'NevergradSearch']
4 changes: 4 additions & 0 deletions siatune/ray/searchers/builder.py
@@ -1,8 +1,12 @@
# Copyright (c) SI-Analytics. All rights reserved.
+
from mmcv.utils import Config, Registry
+from ray import tune
from ray.tune.search import Searcher

SEARCHERS = Registry('searchers')
+for func in set(tune.search.SEARCH_ALG_IMPORT.values()):
+    SEARCHERS.register_module(module=func())


def build_searcher(cfg: Config) -> Searcher:
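
For reference, a hedged sketch of what this registration loop relies on: in Ray 2.1, tune.search.SEARCH_ALG_IMPORT maps searcher names to zero-argument loader functions that import and return a Searcher class, hence the func() call before registering. Enumerating the map only succeeds where the backing library is installed.

from ray import tune

# List the searcher classes Ray can provide on this installation.
for name, loader in sorted(tune.search.SEARCH_ALG_IMPORT.items()):
    try:
        print(f'{name} -> {loader().__name__}')
    except ImportError:
        print(f'{name} -> (backing library not installed)')
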
15 changes: 0 additions & 15 deletions siatune/ray/searchers/flaml.py

This file was deleted.

9 changes: 0 additions & 9 deletions siatune/ray/searchers/hyperopt.py

This file was deleted.

6 changes: 3 additions & 3 deletions siatune/ray/searchers/nevergrad.py
@@ -21,7 +21,7 @@
optimizer_registry = dict()


-@SEARCHERS.register_module()
+@SEARCHERS.register_module(force=True)
class NevergradSearch(_NevergradSearch):
"""Search with Nevergrad."""

@@ -104,8 +104,8 @@ def _setup_nevergrad(self) -> None:
        if len(self._nevergrad_opt.instrumentation.args) != 1:
            raise ValueError(
                'Instrumented optimizers should use kwargs only')
-        if self._parameters is not None and \
-                self._nevergrad_opt.dimension != len(self._parameters):
+        if self._parameters is not None and (self._nevergrad_opt.dimension !=
+                                             len(self._parameters)):
            raise ValueError('len(parameters_names) must match optimizer '
                             'dimension for non-instrumented optimizers')

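
Note: force=True matters here because the registration loop in siatune/ray/searchers/builder.py has already registered Ray's stock NevergradSearch under the same class name; without force, mmcv's Registry raises a KeyError on the duplicate, and SIATune's subclass could not replace it.
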
4 changes: 2 additions & 2 deletions tests/test_ray/test_schedulers.py
@@ -1,9 +1,9 @@
-from siatune.ray.schedulers import SCHEDULERS, build_scheduler
+from siatune.ray.schedulers import TRIAL_SCHEDULERS, build_scheduler


def test_build_schedulers():

-    @SCHEDULERS.register_module()
+    @TRIAL_SCHEDULERS.register_module()
    class TestScheduler:
        pass

31 changes: 24 additions & 7 deletions tests/test_ray/test_searchers.py
@@ -1,9 +1,7 @@
import pytest
from ray import tune

-from siatune.ray.searchers import (SEARCHERS, BlendSearch, CFOSearch,
-                                   HyperOptSearch, NevergradSearch,
-                                   build_searcher)
+from siatune.ray.searchers import SEARCHERS, build_searcher


def test_build_searcher():
@@ -39,7 +37,17 @@ def test_blend(trainable, config):
        trainable,
        metric='mean_loss',
        mode='min',
-        search_alg=BlendSearch(),
+        search_alg=build_searcher(dict(type='BlendSearch')),
        num_samples=2,
        config=config)


+def test_bohb(trainable, config):
+    tune.run(
+        trainable,
+        metric='mean_loss',
+        mode='min',
+        search_alg=build_searcher(dict(type='TuneBOHB')),
+        num_samples=2,
+        config=config)

@@ -49,7 +57,7 @@ def test_cfo(trainable, config):
        trainable,
        metric='mean_loss',
        mode='min',
-        search_alg=CFOSearch(),
+        search_alg=build_searcher(dict(type='CFO')),
        num_samples=2,
        config=config)

@@ -59,7 +67,7 @@ def test_hyperopt(trainable, config):
        trainable,
        metric='mean_loss',
        mode='min',
-        search_alg=HyperOptSearch(),
+        search_alg=build_searcher(dict(type='HyperOptSearch')),
        num_samples=2,
        config=config)

@@ -69,6 +77,15 @@ def test_nevergrad(trainable, config):
        trainable,
        metric='mean_loss',
        mode='min',
-        search_alg=NevergradSearch(optimizer='PSO', budget=2),
+        search_alg=build_searcher(dict(type='NevergradSearch', budget=1)),
        num_samples=2,
        config=config)

+    tune.run(
+        trainable,
+        metric='mean_loss',
+        mode='min',
+        search_alg=build_searcher(
+            dict(type='NevergradSearch', optimizer='PSO', budget=1)),
+        num_samples=2,
+        config=config)
