[Tune] Add new searchers from FLAML (#16329)

Qingyun Wu 2021-06-12 05:10:51 -04:00 committed by GitHub
parent 59f639f9db
commit dae3ac1def
13 changed files with 333 additions and 1 deletion


@@ -30,6 +30,14 @@ Summary
     - Bayesian/Bandit Optimization
     - [`Ax <https://ax.dev/>`__]
     - :doc:`/tune/examples/ax_example`
   * - :ref:`BlendSearch <BlendSearch>`
     - Blended Search
     - [`Bs <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]
     - :doc:`/tune/examples/blendsearch_example`
   * - :ref:`CFO <CFO>`
     - Cost-Frugal hyperparameter Optimization
     - [`Cfo <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]
     - :doc:`/tune/examples/cfo_example`
   * - :ref:`DragonflySearch <Dragonfly>`
     - Scalable Bayesian Optimization
     - [`Dragonfly <https://dragonfly-opt.readthedocs.io/>`__]
@@ -169,7 +177,7 @@ Bayesian Optimization (tune.suggest.bayesopt.BayesOptSearch)
BOHB (tune.suggest.bohb.TuneBOHB)
---------------------------------

BOHB (Bayesian Optimization HyperBand) is an algorithm that both terminates bad trials and also uses Bayesian Optimization to improve the hyperparameter search. It is available from the `HpBandSter library <https://github.com/automl/HpBandSter>`_.

Importantly, BOHB is intended to be paired with a specific scheduler class: :ref:`HyperBandForBOHB <tune-scheduler-bohb>`.
@@ -183,6 +191,42 @@ See the `BOHB paper <https://arxiv.org/abs/1807.01774>`_ for more details.

.. autoclass:: ray.tune.suggest.bohb.TuneBOHB
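
For orientation, a minimal sketch of that pairing, assuming a trainable that reports ``mean_loss`` (``my_trainable`` here is hypothetical, not part of this commit):

.. code-block:: python

    from ray import tune
    from ray.tune.schedulers import HyperBandForBOHB
    from ray.tune.suggest.bohb import TuneBOHB

    # TuneBOHB proposes configurations; HyperBandForBOHB stops bad trials early.
    algo = TuneBOHB(metric="mean_loss", mode="min")
    scheduler = HyperBandForBOHB(
        time_attr="training_iteration",
        metric="mean_loss",
        mode="min",
        max_t=100)
    tune.run(
        my_trainable,  # hypothetical trainable that reports `mean_loss`
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10)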
.. _BlendSearch:

BlendSearch (tune.suggest.flaml.BlendSearch)
--------------------------------------------

BlendSearch is an economical hyperparameter optimization algorithm that combines local search with global search. It is backed by the `FLAML library <https://github.com/microsoft/FLAML>`_.
It allows users to specify a low-cost initial point as input if such a point exists; a usage sketch is shown below.

In order to use this search algorithm, you will need to install ``flaml``:

.. code-block:: bash

    $ pip install 'flaml[blendsearch]'

See the `BlendSearch paper <https://openreview.net/pdf?id=VbLH04pRA3>`_ and the FLAML `BlendSearch documentation <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`_ for more details.

.. autoclass:: ray.tune.suggest.flaml.BlendSearch
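
A minimal usage sketch follows. The ``low_cost_partial_config`` argument is how FLAML accepts the low-cost initial point mentioned above; treat the exact keyword as an assumption and verify it against the FLAML documentation for your installed version:

.. code-block:: python

    from ray import tune
    from ray.tune.suggest.flaml import BlendSearch

    def objective(config):
        tune.report(mean_loss=(config["height"] - 14)**2 + config["width"])

    algo = BlendSearch(
        metric="mean_loss",
        mode="min",
        # Assumed FLAML keyword: start the search from a cheap configuration.
        low_cost_partial_config={"width": 1})
    tune.run(
        objective,
        search_alg=algo,
        config={
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
        },
        num_samples=10)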
.. _CFO:

CFO (tune.suggest.flaml.CFO)
----------------------------

CFO (Cost-Frugal hyperparameter Optimization) is a hyperparameter search algorithm based on randomized local search. It is backed by the `FLAML library <https://github.com/microsoft/FLAML>`_.
It allows users to specify a low-cost initial point as input if such a point exists.

In order to use this search algorithm, you will need to install ``flaml``:

.. code-block:: bash

    $ pip install flaml

See the `CFO paper <https://arxiv.org/pdf/2005.01571.pdf>`_ and the FLAML `CFO documentation <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`_ for more details.

.. autoclass:: ray.tune.suggest.flaml.CFO
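
A corresponding sketch for CFO, mirroring the example script added in this commit (the objective and search space below are illustrative):

.. code-block:: python

    from ray import tune
    from ray.tune.suggest import ConcurrencyLimiter
    from ray.tune.suggest.flaml import CFO

    def objective(config):
        tune.report(mean_loss=(config["height"] - 14)**2 + config["width"])

    algo = ConcurrencyLimiter(CFO(), max_concurrent=4)
    tune.run(
        objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        config={
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
        },
        num_samples=10)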

.. _Dragonfly:

Dragonfly (tune.suggest.dragonfly.DragonflySearch)


@@ -0,0 +1,6 @@
:orphan:

blendsearch_example
~~~~~~~~~~~~~~~~~~~

.. literalinclude:: /../../python/ray/tune/examples/blendsearch_example.py


@@ -0,0 +1,6 @@
:orphan:

cfo_example
~~~~~~~~~~~

.. literalinclude:: /../../python/ray/tune/examples/cfo_example.py


@@ -38,6 +38,8 @@ Search Algorithm Examples
- :doc:`/tune/examples/skopt_example`: Example script showing usage of :ref:`SkoptSearch <skopt>` [`Scikit-Optimize website <https://scikit-optimize.github.io>`__]
- :doc:`/tune/examples/hyperopt_example`: Example script showing usage of :ref:`HyperOptSearch <tune-hyperopt>` [`HyperOpt website <http://hyperopt.github.io/hyperopt>`__]
- :doc:`/tune/examples/bayesopt_example`: Example script showing usage of :ref:`BayesOptSearch <bayesopt>` [`BayesianOptimization website <https://github.com/fmfn/BayesianOptimization>`__]
- :doc:`/tune/examples/blendsearch_example`: Example script showing usage of :ref:`BlendSearch <BlendSearch>` [`BlendSearch website <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]
- :doc:`/tune/examples/cfo_example`: Example script showing usage of :ref:`CFO <CFO>` [`CFO website <https://github.com/microsoft/FLAML/tree/main/flaml/tune>`__]
- :doc:`/tune/examples/bohb_example`: Example script showing usage of :ref:`TuneBOHB <suggest-TuneBOHB>` [`BOHB website <https://github.com/automl/HpBandSter>`__]
- :doc:`/tune/examples/nevergrad_example`: Example script showing usage of :ref:`NevergradSearch <nevergrad>` [`Nevergrad website <https://github.com/facebookresearch/nevergrad>`__]
- :doc:`/tune/examples/optuna_example`: Example script showing usage of :ref:`OptunaSearch <tune-optuna>` [`Optuna website <https://optuna.org/>`__]


@@ -374,6 +374,15 @@ py_test(
    args = ["--smoke-test"]
)

py_test(
    name = "blendsearch_example",
    size = "small",
    srcs = ["examples/blendsearch_example.py"],
    deps = [":tune_lib"],
    tags = ["exclusive", "example"],
    args = ["--smoke-test"]
)

py_test(
    name = "bohb_example",
    size = "small",
@@ -382,6 +391,15 @@ py_test(
    tags = ["exclusive", "example"]
)

py_test(
    name = "cfo_example",
    size = "small",
    srcs = ["examples/cfo_example.py"],
    deps = [":tune_lib"],
    tags = ["exclusive", "example"],
    args = ["--smoke-test"]
)

py_test(
    name = "cifar10_pytorch",
    size = "medium",


@@ -0,0 +1,101 @@
"""This example demonstrates the usage of BlendSearch with Ray Tune.

It also checks that it is usable with a separate scheduler.
"""
import time

import ray
from ray import tune
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.flaml import BlendSearch


def evaluation_fn(step, width, height):
    return (0.1 + width * step / 100)**(-1) + height * 0.1


def easy_objective(config):
    # Hyperparameters
    width, height = config["width"], config["height"]

    for step in range(config["steps"]):
        # Iterative training function - can be any arbitrary training procedure
        intermediate_score = evaluation_fn(step, width, height)
        # Feed the score back to Tune.
        tune.report(iterations=step, mean_loss=intermediate_score)
        time.sleep(0.1)


def run_blendsearch_tune(smoke_test=False):
    algo = BlendSearch()
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10 if smoke_test else 100,
        config={
            "steps": 100,
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            # This is an ignored parameter.
            "activation": tune.choice(["relu", "tanh"])
        })

    print("Best hyperparameters found were: ", analysis.best_config)


def run_blendsearch_tune_w_budget(time_budget_s=10):
    """Run BlendSearch with the given time budget in seconds."""
    algo = BlendSearch(
        metric="mean_loss",
        mode="min",
        space={
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            "activation": tune.choice(["relu", "tanh"])
        })
    # Pass the time budget to the searcher via its search properties.
    algo.set_search_properties(config={"time_budget_s": time_budget_s})
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        time_budget_s=time_budget_s,
        # num_samples=-1 keeps sampling until the time budget is exhausted.
        num_samples=-1,
        config={
            "steps": 100,
        })

    print("Best hyperparameters found were: ", analysis.best_config)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--smoke-test", action="store_true", help="Finish quickly for testing")
    parser.add_argument(
        "--server-address",
        type=str,
        default=None,
        required=False,
        help="The address of server to connect to if using "
        "Ray Client.")
    args, _ = parser.parse_known_args()

    if args.server_address is not None:
        ray.util.connect(args.server_address)
    else:
        ray.init(configure_logging=False)

    run_blendsearch_tune_w_budget(time_budget_s=30)
    run_blendsearch_tune(smoke_test=args.smoke_test)


@@ -0,0 +1,71 @@
"""This example demonstrates the usage of CFO with Ray Tune.

It also checks that it is usable with a separate scheduler.
"""
import time

import ray
from ray import tune
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest.flaml import CFO


def evaluation_fn(step, width, height):
    return (0.1 + width * step / 100)**(-1) + height * 0.1


def easy_objective(config):
    # Hyperparameters
    width, height = config["width"], config["height"]

    for step in range(config["steps"]):
        # Iterative training function - can be any arbitrary training procedure
        intermediate_score = evaluation_fn(step, width, height)
        # Feed the score back to Tune.
        tune.report(iterations=step, mean_loss=intermediate_score)
        time.sleep(0.1)


def run_cfo_tune(smoke_test=False):
    algo = CFO()
    algo = ConcurrencyLimiter(algo, max_concurrent=4)
    scheduler = AsyncHyperBandScheduler()
    analysis = tune.run(
        easy_objective,
        metric="mean_loss",
        mode="min",
        search_alg=algo,
        scheduler=scheduler,
        num_samples=10 if smoke_test else 100,
        config={
            "steps": 100,
            "width": tune.uniform(0, 20),
            "height": tune.uniform(-100, 100),
            # This is an ignored parameter.
            "activation": tune.choice(["relu", "tanh"])
        })

    print("Best hyperparameters found were: ", analysis.best_config)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--smoke-test", action="store_true", help="Finish quickly for testing")
    parser.add_argument(
        "--server-address",
        type=str,
        default=None,
        required=False,
        help="The address of server to connect to if using "
        "Ray Client.")
    args, _ = parser.parse_known_args()

    if args.server_address is not None:
        ray.util.connect(args.server_address)
    else:
        ray.init(configure_logging=False)

    run_cfo_tune(smoke_test=args.smoke_test)


@@ -37,6 +37,14 @@ def create_searcher(
        from ray.tune.suggest.ax import AxSearch
        return AxSearch

    def _import_blendsearch_search():
        from ray.tune.suggest.flaml import BlendSearch
        return BlendSearch

    def _import_cfo_search():
        from ray.tune.suggest.flaml import CFO
        return CFO

    def _import_dragonfly_search():
        from ray.tune.suggest.dragonfly import DragonflySearch
        return DragonflySearch
@@ -91,6 +99,8 @@ def create_searcher(
        "zoopt": _import_zoopt_search,
        "sigopt": _import_sigopt_search,
        "hebo": _import_hebo_search,
        "blendsearch": _import_blendsearch_search,
        "cfo": _import_cfo_search,
    }

    search_alg = search_alg.lower()
    if search_alg not in SEARCH_ALG_IMPORT:
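
For reference, a hedged sketch of how the new registry keys might be used. This assumes ``create_searcher`` is importable from ``ray.tune.suggest`` (the module this diff touches) and that extra keyword arguments are forwarded to the searcher constructor, as the mapping above suggests:

    from ray import tune
    from ray.tune.suggest import create_searcher

    def objective(config):
        tune.report(mean_loss=config["x"]**2)

    # "blendsearch" and "cfo" now resolve to the FLAML searchers.
    searcher = create_searcher("cfo", metric="mean_loss", mode="min")
    tune.run(
        objective,
        search_alg=searcher,
        config={"x": tune.uniform(-10, 10)},
        num_samples=10)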


@@ -0,0 +1,4 @@
try:
    from flaml import BlendSearch, CFO
except ImportError:
    BlendSearch = CFO = None
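
With this guard, a missing ``flaml`` dependency surfaces only when the searcher is actually used. A caller can make the failure explicit; a minimal sketch (the error message is illustrative, not part of this commit):

    from ray.tune.suggest.flaml import BlendSearch

    if BlendSearch is None:
        raise ImportError(
            "BlendSearch requires flaml. "
            "Install it with: pip install 'flaml[blendsearch]'")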


@@ -75,6 +75,24 @@ class ConvergenceTest(unittest.TestCase):
        assert len(analysis.trials) < 50
        assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-5)

    def testConvergenceBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        np.random.seed(0)
        searcher = BlendSearch()
        analysis = self._testConvergence(searcher, patience=10)
        assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-5)

    def testConvergenceCFO(self):
        from ray.tune.suggest.flaml import CFO

        np.random.seed(0)
        searcher = CFO()
        analysis = self._testConvergence(searcher, patience=10)
        assert math.isclose(analysis.best_config["x"], 0, abs_tol=1e-5)

    def testConvergenceDragonfly(self):
        from ray.tune.suggest.dragonfly import DragonflySearch


@@ -80,6 +80,21 @@ class InvalidValuesTest(unittest.TestCase):
        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)

    def testBlendSearch(self):
        from ray.tune.suggest.flaml import BlendSearch

        out = tune.run(
            _invalid_objective,
            search_alg=BlendSearch(),
            config=self.config,
            metric="_metric",
            mode="max",
            num_samples=8,
            reuse_actors=False)
        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)

    def testBOHB(self):
        from ray.tune.suggest.bohb import TuneBOHB
@@ -94,6 +109,21 @@ class InvalidValuesTest(unittest.TestCase):
        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)

    def testCFO(self):
        from ray.tune.suggest.flaml import CFO

        out = tune.run(
            _invalid_objective,
            search_alg=CFO(),
            config=self.config,
            metric="_metric",
            mode="max",
            num_samples=8,
            reuse_actors=False)
        best_trial = out.best_trial
        self.assertLessEqual(best_trial.config["report"], 2.0)

    def testDragonfly(self):
        from ray.tune.suggest.dragonfly import DragonflySearch


@@ -24,6 +24,7 @@ from ray.tune.suggest import ConcurrencyLimiter, Searcher
from ray.tune.suggest.hyperopt import HyperOptSearch
from ray.tune.suggest.dragonfly import DragonflySearch
from ray.tune.suggest.bayesopt import BayesOptSearch
from ray.tune.suggest.flaml import CFO
from ray.tune.suggest.skopt import SkOptSearch
from ray.tune.suggest.nevergrad import NevergradSearch
from ray.tune.suggest.optuna import OptunaSearch, param as ot_param
@@ -537,6 +538,26 @@ class BayesoptWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
        tune.run(cost, num_samples=10, search_alg=search_alg3, verbose=0)


class CFOWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
    def set_basic_conf(self):
        space = {
            "height": tune.uniform(-100, 100),
            "width": tune.randint(0, 100),
        }

        def cost(param, reporter):
            reporter(loss=(param["height"] - 14)**2 - abs(param["width"] - 3))

        search_alg = CFO(
            space=space,
            metric="loss",
            mode="min",
            seed=20,
        )

        return search_alg, cost


class SkoptWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
    def set_basic_conf(self):
        optimizer = skopt.Optimizer([(0, 20), (-100, 100)])


@@ -4,6 +4,7 @@ ax-platform==0.1.20; python_version >= '3.7'
bayesian-optimization==1.2.0
ConfigSpace==0.4.18
dragonfly-opt==0.1.6
flaml==0.5.2
gluoncv==0.10.1.post0
gpy==1.10.0
gym[atari]==0.18.0