[Tune] Added method to integrate previous analysis in BO (#8486)

This commit is contained in:
Luca Cappelletti 2020-05-20 08:26:43 +02:00 committed by GitHub
parent f8f7efc24f
commit c9898eff24
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
2 changed files with 46 additions and 5 deletions

View file

@@ -38,6 +38,8 @@ class BayesOptSearch(Searcher):
- kappa: 2.576
- xi: 0.0
random_state (int): Used to initialize BayesOpt.
analysis (ExperimentAnalysis): Optionally, the previous analysis
to integrate.
verbose (int): Sets verbosity level for BayesOpt packages.
max_concurrent: Deprecated.
use_early_stopped_trials: Deprecated.
@@ -64,8 +66,27 @@ class BayesOptSearch(Searcher):
utility_kwargs=None,
random_state=1,
verbose=0,
analysis=None,
max_concurrent=None,
use_early_stopped_trials=None):
"""Instantiate new BayesOptSearch object.
Parameters:
space (dict): Continuous search space.
Parameters will be sampled from
this space which will be used to run trials.
metric (str): The training result objective value attribute.
mode (str): One of {min, max}. Determines whether objective is
minimizing or maximizing the metric attribute.
utility_kwargs (dict): Parameters to define the utility function.
Must provide values for the keys `kind`, `kappa`, and `xi`.
random_state (int): Used to initialize BayesOpt.
analysis (ExperimentAnalysis): Optionally, the previous analysis
to integrate.
verbose (int): Sets verbosity level for BayesOpt packages.
max_concurrent: Deprecated.
use_early_stopped_trials: Deprecated.
"""
assert byo is not None, (
"BayesOpt must be installed!. You can install BayesOpt with"
" the command: `pip install bayesian-optimization`.")
@@ -96,6 +117,8 @@ class BayesOptSearch(Searcher):
f=None, pbounds=space, verbose=verbose, random_state=random_state)
self.utility = byo.UtilityFunction(**utility_kwargs)
if analysis is not None:
self.register_analysis(analysis)
def suggest(self, trial_id):
if self.max_concurrent:
@@ -107,6 +130,21 @@ class BayesOptSearch(Searcher):
return copy.deepcopy(new_trial)
def register_analysis(self, analysis):
    """Integrate a previous experiment analysis into the gaussian process.

    Each completed trial from the analysis is replayed into the
    underlying BayesOpt optimizer so the surrogate model starts from
    the prior results instead of from scratch.

    Parameters
    ------------------
    analysis (ExperimentAnalysis): Optionally, the previous analysis
        to integrate.
    """
    reports = analysis.dataframe().iterrows()
    configs = analysis.get_all_configs().values()
    for (_, report), params in zip(reports, configs):
        # Replay the observed (config, score) pair into the
        # gaussian process optimizer, applying the min/max sign flip.
        target = self._metric_op * report[self._metric]
        self.optimizer.register(params=params, target=target)
def on_trial_complete(self, trial_id, result=None, error=False):
"""Notification for the completion of trial."""
if result:

View file

@@ -202,19 +202,22 @@ class HyperoptWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
class BayesoptWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
    def set_basic_conf(self, analysis=None):
        """Return a BayesOpt searcher (optionally warm-started) and objective.

        Parameters:
            analysis: previous ExperimentAnalysis to feed into the new
                searcher, or None for a cold start.
        """
        space = {"width": (0, 20), "height": (-100, 100)}

        def cost(space, reporter):
            # Deterministic objective reported under the "loss" metric.
            reporter(loss=(space["height"] - 14)**2 - abs(space["width"] - 3))

        search_alg = BayesOptSearch(
            space, metric="loss", mode="min", analysis=analysis)
        return search_alg, cost

    def testBootStrapAnalysis(self):
        """A fresh searcher bootstrapped from a prior run should still work."""
        analysis = self.run_exp_3()
        search_alg3, cost = self.set_basic_conf(analysis)
        search_alg3 = ConcurrencyLimiter(search_alg3, 1)
        tune.run(cost, num_samples=10, search_alg=search_alg3, verbose=0)
class SkoptWarmStartTest(AbstractWarmStartTest, unittest.TestCase):
def set_basic_conf(self):