[tune] Conditional search space example using hyperopt (#18130)

Co-authored-by: Ryan Melvin <rmelvin@uabmc.edu>
Co-authored-by: Antoni Baum <antoni.baum@protonmail.com>
Ryan L. Melvin 2021-08-31 10:06:22 -05:00 committed by GitHub
parent a8dbc44f9a
commit c081c68de7
6 changed files with 137 additions and 0 deletions

@@ -42,6 +42,8 @@ MOCK_MODULES = [
    "gym.spaces",
    "horovod",
    "horovod.ray",
    "hyperopt",
    "hyperopt.hp",
    "kubernetes",
    "mlflow",
    "modin",

@@ -0,0 +1,8 @@
:orphan:

hyperopt_conditional_search_space_example
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

.. literalinclude:: /../../python/ray/tune/examples/hyperopt_conditional_search_space_example.py

@@ -37,6 +37,7 @@ Search Algorithm Examples
- :doc:`/tune/examples/dragonfly_example`: Example script showing usage of :ref:`DragonflySearch <Dragonfly>` [`Dragonfly website <https://dragonfly-opt.readthedocs.io/>`__]
- :doc:`/tune/examples/skopt_example`: Example script showing usage of :ref:`SkoptSearch <skopt>` [`Scikit-Optimize website <https://scikit-optimize.github.io>`__]
- :doc:`/tune/examples/hyperopt_example`: Example script showing usage of :ref:`HyperOptSearch <tune-hyperopt>` [`HyperOpt website <http://hyperopt.github.io/hyperopt>`__]
- :doc:`/tune/examples/hyperopt_conditional_search_space_example`: Example script showing usage of :ref:`HyperOptSearch <tune-hyperopt>` [`HyperOpt website <http://hyperopt.github.io/hyperopt>`__] with a conditional search space
- :doc:`/tune/examples/bayesopt_example`: Example script showing usage of :ref:`BayesOptSearch <bayesopt>` [`BayesianOptimization website <https://github.com/fmfn/BayesianOptimization>`__]
- :doc:`/tune/examples/blendsearch_example`: Example script showing usage of :ref:`BlendSearch <BlendSearch>` [`BlendSearch website <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]
- :doc:`/tune/examples/cfo_example`: Example script showing usage of :ref:`CFO <CFO>` [`CFO website <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]

@@ -542,6 +542,16 @@ py_test(
    args = ["--smoke-test"]
)

py_test(
    name = "hyperopt_conditional_search_space_example",
    size = "medium",
    srcs = ["examples/hyperopt_conditional_search_space_example.py"],
    deps = [":tune_lib"],
    tags = ["team:ml", "exclusive", "example"],
    args = ["--smoke-test"]
)

py_test(
    name = "lightgbm_example",
    size = "small",

@@ -70,3 +70,4 @@ Contributed Examples
- `pbt_tune_cifar10_with_keras <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/pbt_tune_cifar10_with_keras.py>`__: A contributed example of tuning a Keras model on CIFAR10 with the PopulationBasedTraining scheduler.
- `genetic_example <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/genetic_example.py>`__: Optimizing the Michalewicz function using the contributed GeneticSearch search algorithm with AsyncHyperBandScheduler.
- `tune_cifar10_gluon <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/tune_cifar10_gluon.py>`__: MXNet Gluon example to use Tune with the function-based API on CIFAR-10 dataset.
- `hyperopt_conditional_search_space_example <https://github.com/ray-project/ray/blob/master/python/ray/tune/examples/hyperopt_conditional_search_space_example.py>`__: Conditional search space example using HyperOpt.

@@ -0,0 +1,115 @@
"""This example demonstrates the usage of conditional search spaces with Tune.
It also checks that it is usable with a separate scheduler.
For an example of using a Tune search space, see
:doc:`/tune/examples/optuna_example`.
"""
import time
import ray
from ray import tune
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.hyperopt import HyperOptSearch
from hyperopt import hp


def f_unpack_dict(dct):
"""
Unpacks all sub-dictionaries in given dictionary recursively.
There should be no duplicated keys across all nested
subdictionaries, or some instances will be lost without warning
Source: https://www.kaggle.com/fanvacoolt/tutorial-on-hyperopt
Parameters:
----------------
dct : dictionary to unpack
Returns:
----------------
: unpacked dictionary
"""
res = {}
for (k, v) in dct.items():
if isinstance(v, dict):
res = {**res, **f_unpack_dict(v)}
else:
res[k] = v
return res
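

# Illustration (added, not part of the original example): f_unpack_dict turns a
# nested dict into a flat one, e.g.
#   f_unpack_dict({"a": 1, "b": {"c": 2, "d": 3}}) == {"a": 1, "c": 2, "d": 3}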


def evaluation_fn(step, width, height, mult=1):
return (0.1 + width * step / 100)**(-1) + height * 0.1 * mult


def easy_objective(config_in):
# Hyperparameters
config = f_unpack_dict(config_in)
width, height, mult = config["width"], config["height"], config.get(
"mult", 1)
print(config)
for step in range(config["steps"]):
# Iterative training function - can be any arbitrary training procedure
intermediate_score = evaluation_fn(step, width, height, mult)
        # Feed the score back to Tune.
tune.report(iterations=step, mean_loss=intermediate_score)
time.sleep(0.1)


config_space = {
"activation": hp.choice("activation", [
{
"activation": "relu",
"mult": hp.uniform("mult", 1, 2)
},
{
"activation": "tanh"
},
]),
"width": hp.uniform("width", 0, 20),
"height": hp.uniform("heright", -100, 100),
"steps": 100
}
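
# Note (added for illustration; sampled values are made up): hp.choice over dicts
# yields a nested config such as
#   {"activation": {"activation": "relu", "mult": 1.4},
#    "width": 7.3, "height": -12.5, "steps": 100}
# when the "relu" branch is drawn; the "tanh" branch carries no "mult" key, which
# is why easy_objective flattens the config and falls back to config.get("mult", 1).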


def run_hyperopt_tune(config_dict=config_space, smoke_test=False):
algo = HyperOptSearch(space=config_dict, metric="mean_loss", mode="min")
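    # Cap the number of concurrent trials so HyperOpt can factor completed results
    # into later suggestions (comment added for clarity; max_concurrent=4 is the
    # value used in this example).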
algo = ConcurrencyLimiter(algo, max_concurrent=4)
scheduler = AsyncHyperBandScheduler()
analysis = tune.run(
easy_objective,
metric="mean_loss",
mode="min",
search_alg=algo,
scheduler=scheduler,
num_samples=10 if smoke_test else 100,
)
print("Best hyperparameters found were: ", analysis.best_config)


if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
parser.add_argument(
"--server-address",
type=str,
default=None,
required=False,
help="The address of server to connect to if using "
"Ray Client.")
args, _ = parser.parse_known_args()
if args.server_address is not None:
ray.util.connect(args.server_address)
else:
ray.init(configure_logging=False)
run_hyperopt_tune(smoke_test=args.smoke_test)
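
# Example invocation (note added for clarity, based on the flags defined above):
#   python hyperopt_conditional_search_space_example.py --smoke-test
# Pass --server-address to connect to a running cluster via Ray Client instead of
# starting Ray locally.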