diff --git a/doc/source/conf.py b/doc/source/conf.py
index 3f14dcfa2..2a4ff7258 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -42,6 +42,8 @@ MOCK_MODULES = [
     "gym.spaces",
     "horovod",
     "horovod.ray",
+    "hyperopt",
+    "hyperopt.hp",
     "kubernetes",
     "mlflow",
     "modin",
diff --git a/doc/source/tune/examples/hyperopt_conditional_search_space_example.rst b/doc/source/tune/examples/hyperopt_conditional_search_space_example.rst
new file mode 100644
index 000000000..76729c021
--- /dev/null
+++ b/doc/source/tune/examples/hyperopt_conditional_search_space_example.rst
@@ -0,0 +1,8 @@
+:orphan:
+
+hyperopt_conditional_search_space_example
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+
+
+.. literalinclude:: /../../python/ray/tune/examples/hyperopt_conditional_search_space_example.py
\ No newline at end of file
diff --git a/doc/source/tune/examples/index.rst b/doc/source/tune/examples/index.rst
index c6dda149d..7d82062c1 100644
--- a/doc/source/tune/examples/index.rst
+++ b/doc/source/tune/examples/index.rst
@@ -37,6 +37,7 @@ Search Algorithm Examples
 - :doc:`/tune/examples/dragonfly_example`: Example script showing usage of :ref:`DragonflySearch ` [`Dragonfly website `__]
 - :doc:`/tune/examples/skopt_example`: Example script showing usage of :ref:`SkoptSearch ` [`Scikit-Optimize website `__]
 - :doc:`/tune/examples/hyperopt_example`: Example script showing usage of :ref:`HyperOptSearch ` [`HyperOpt website `__]
+- :doc:`/tune/examples/hyperopt_conditional_search_space_example`: Example script showing usage of :ref:`HyperOptSearch ` [`HyperOpt website `__] with a conditional search space
 - :doc:`/tune/examples/bayesopt_example`: Example script showing usage of :ref:`BayesOptSearch ` [`BayesianOptimization website `__]
 - :doc:`/tune/examples/blendsearch_example`: Example script showing usage of :ref:`BlendSearch ` [`BlendSearch website `__]
 - :doc:`/tune/examples/cfo_example`: Example script showing usage of :ref:`CFO ` [`CFO website `__]
diff --git a/python/ray/tune/BUILD b/python/ray/tune/BUILD
index 06c67c75e..5853ba695 100644
--- a/python/ray/tune/BUILD
+++ b/python/ray/tune/BUILD
@@ -542,6 +542,16 @@ py_test(
     args = ["--smoke-test"]
 )
 
+py_test(
+    name = "hyperopt_conditional_search_space_example",
+    size = "medium",
+    srcs = ["examples/hyperopt_conditional_search_space_example.py"],
+    deps = [":tune_lib"],
+    tags = ["team:ml", "exclusive", "example"],
+    args = ["--smoke-test"]
+)
+
+
 py_test(
     name = "lightgbm_example",
     size = "small",
diff --git a/python/ray/tune/examples/README.rst b/python/ray/tune/examples/README.rst
index 5c5c45260..2fa4529a3 100644
--- a/python/ray/tune/examples/README.rst
+++ b/python/ray/tune/examples/README.rst
@@ -70,3 +70,4 @@ Contributed Examples
 - `pbt_tune_cifar10_with_keras `__: A contributed example of tuning a Keras model on CIFAR10 with the PopulationBasedTraining scheduler.
 - `genetic_example `__: Optimizing the michalewicz function using the contributed GeneticSearch search algorithm with AsyncHyperBandScheduler.
 - `tune_cifar10_gluon `__: MXNet Gluon example to use Tune with the function-based API on CIFAR-10 dataset.
+- `hyperopt_conditional_search_space_example `__: Conditional search space example using HyperOpt.
diff --git a/python/ray/tune/examples/hyperopt_conditional_search_space_example.py b/python/ray/tune/examples/hyperopt_conditional_search_space_example.py
new file mode 100644
index 000000000..22ca8694b
--- /dev/null
+++ b/python/ray/tune/examples/hyperopt_conditional_search_space_example.py
@@ -0,0 +1,115 @@
+"""This example demonstrates the usage of conditional search spaces with Tune.
+
+It also checks that the conditional space works with a separate scheduler.
+
+For an example of using a native Tune search space, see
+:doc:`/tune/examples/optuna_example`.
+"""
+import time
+
+import ray
+from ray import tune
+from ray.tune.suggest import ConcurrencyLimiter
+from ray.tune.schedulers import AsyncHyperBandScheduler
+from ray.tune.suggest.hyperopt import HyperOptSearch
+from hyperopt import hp
+
+
+def f_unpack_dict(dct):
+    """
+    Unpacks all sub-dictionaries in the given dictionary recursively.
+    There should be no duplicated keys across the nested
+    sub-dictionaries, or some entries will be lost without warning.
+
+    Source: https://www.kaggle.com/fanvacoolt/tutorial-on-hyperopt
+
+    Parameters:
+    ----------------
+    dct : dictionary to unpack
+
+    Returns:
+    ----------------
+    : unpacked dictionary
+    """
+
+    res = {}
+    for (k, v) in dct.items():
+        if isinstance(v, dict):
+            res = {**res, **f_unpack_dict(v)}
+        else:
+            res[k] = v
+
+    return res
+
+
+def evaluation_fn(step, width, height, mult=1):
+    return (0.1 + width * step / 100)**(-1) + height * 0.1 * mult
+
+
+def easy_objective(config_in):
+    # Hyperparameters
+    config = f_unpack_dict(config_in)
+    width, height, mult = config["width"], config["height"], config.get(
+        "mult", 1)
+    print(config)
+
+    for step in range(config["steps"]):
+        # Iterative training function - can be any arbitrary training procedure
+        intermediate_score = evaluation_fn(step, width, height, mult)
+        # Feed the score back to Tune.
+        tune.report(iterations=step, mean_loss=intermediate_score)
+        time.sleep(0.1)
+
+
+config_space = {
+    "activation": hp.choice("activation", [
+        {
+            "activation": "relu",
+            "mult": hp.uniform("mult", 1, 2)
+        },
+        {
+            "activation": "tanh"
+        },
+    ]),
+    "width": hp.uniform("width", 0, 20),
+    "height": hp.uniform("height", -100, 100),
+    "steps": 100
+}
+
+
+def run_hyperopt_tune(config_dict=config_space, smoke_test=False):
+    algo = HyperOptSearch(space=config_dict, metric="mean_loss", mode="min")
+    algo = ConcurrencyLimiter(algo, max_concurrent=4)
+    scheduler = AsyncHyperBandScheduler()
+    analysis = tune.run(
+        easy_objective,
+        metric="mean_loss",
+        mode="min",
+        search_alg=algo,
+        scheduler=scheduler,
+        num_samples=10 if smoke_test else 100,
+    )
+
+    print("Best hyperparameters found were: ", analysis.best_config)
+
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--smoke-test", action="store_true", help="Finish quickly for testing")
+    parser.add_argument(
+        "--server-address",
+        type=str,
+        default=None,
+        required=False,
+        help="The address of server to connect to if using "
+        "Ray Client.")
+    args, _ = parser.parse_known_args()
+    if args.server_address is not None:
+        ray.util.connect(args.server_address)
+    else:
+        ray.init(configure_logging=False)
+
+    run_hyperopt_tune(smoke_test=args.smoke_test)
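A note on the conditional space above (illustration only, not part of the patch): hp.choice hands the selected branch to the trainable as a nested dictionary, so the sampled config carries the "activation" sub-dictionary one level deep, and f_unpack_dict flattens it before easy_objective reads its keys. A minimal sketch with made-up sample values, assuming f_unpack_dict from the example above is in scope:

    # Hypothetical config as it would arrive when HyperOpt picks the "relu" branch;
    # the numeric values here are invented for illustration.
    sampled = {
        "activation": {"activation": "relu", "mult": 1.4},
        "width": 11.3,
        "height": -23.0,
        "steps": 100,
    }

    flat = f_unpack_dict(sampled)
    # The nested branch is merged into the top level. The "tanh" branch would
    # simply omit "mult", which is why easy_objective uses config.get("mult", 1).
    assert flat == {
        "activation": "relu",
        "mult": 1.4,
        "width": 11.3,
        "height": -23.0,
        "steps": 100,
    }

Flattening at the top of the trainable keeps the HyperOpt-specific nesting out of the training logic, which is the design choice the example relies on.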