diff --git a/README.md b/README.md
index 860616cd..6c7e23b9 100644
--- a/README.md
+++ b/README.md
@@ -19,9 +19,10 @@ SIATune is an open-source deep learning model hyperparameter tuning toolbox espe
 - **Support hyperparameter search algorithms**
 
   We provide hyperparameter search algorithms such as below;
-  - [x] [Nevergrad](https://github.com/facebookresearch/nevergrad)
-  - [x] [HyperOpt](https://github.com/hyperopt/hyperopt)
   - [x] [FLAML](https://github.com/microsoft/FLAML)
+  - [x] [HyperOpt](https://github.com/hyperopt/hyperopt)
+  - [x] [Nevergrad](https://github.com/facebookresearch/nevergrad)
+  - [x] [Optuna](https://github.com/optuna/optuna)
   - [ ] [Adaptive Experimentation (AX)](https://ax.dev/)
   - [ ] [Scikit-optimize](https://github.com/scikit-optimize/scikit-optimize)
 
diff --git a/configs/_base_/searcher/optuna.py b/configs/_base_/searcher/optuna.py
new file mode 100644
index 00000000..8b712131
--- /dev/null
+++ b/configs/_base_/searcher/optuna.py
@@ -0,0 +1 @@
+searcher = dict(type='OptunaSearch')
diff --git a/tests/test_hyper_optim/test_searchers.py b/tests/test_hyper_optim/test_searchers.py
index 0eef59fe..ae5a5a79 100644
--- a/tests/test_hyper_optim/test_searchers.py
+++ b/tests/test_hyper_optim/test_searchers.py
@@ -89,3 +89,13 @@ def test_nevergrad(trainable, config):
             dict(type='NevergradSearch', optimizer='PSO', budget=1)),
         num_samples=2,
         config=config)
+
+
+def test_optuna(trainable, config):
+    tune.run(
+        trainable,
+        metric='mean_loss',
+        mode='min',
+        search_alg=build_searcher(dict(type='OptunaSearch')),
+        num_samples=2,
+        config=config)