mirror of
https://github.com/vale981/ray
synced 2025-03-10 13:26:39 -04:00

This PR renames the `suggest` package to `search` and alters the layout slightly. In the new package, the higher-level abstractions are on the top level and the search algorithms have their own subdirectories. In a future refactor, we can turn algorithms such as PBT into actual `SearchAlgorithm` classes and move them into the `search` package. The main reason to keep algorithms and searchers in the same directory is to avoid user confusion - for a user, `Bayesopt` is as much a search algorithm as e.g. `PBT`, so it doesn't make sense to split them up.
31 lines
904 B
Python
# flake8: noqa

# Placeholder metric value so this documentation example runs without
# actually training or evaluating the Keras model defined below.
accuracy = 42
# __keras_hyperopt_start__
from ray import tune
from ray.tune.search.hyperopt import HyperOptSearch

import keras
# 1. Wrap a Keras model in an objective function.
def objective(config):
    """Build a small Keras model from *config* and report its accuracy.

    Args:
        config: Trial hyperparameters; ``config["activation"]`` selects the
            activation for the hidden layer.

    Returns:
        A dict with the ``"accuracy"`` metric that Tune optimizes. Training
        and evaluation are stubbed out in this docs example, so the
        module-level placeholder ``accuracy`` is reported instead.
    """
    model = keras.models.Sequential()
    model.add(keras.layers.Dense(784, activation=config["activation"]))
    model.add(keras.layers.Dense(10, activation="softmax"))

    model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
    # model.fit(...)
    # loss, accuracy = model.evaluate(...)
    return {"accuracy": accuracy}
# 2. Define a search space and initialize the search algorithm.
search_space = {"activation": tune.choice(["relu", "tanh"])}
algo = HyperOptSearch()

# 3. Start a Tune run that maximizes accuracy.
analysis = tune.run(
    objective, search_alg=algo, config=search_space, metric="accuracy", mode="max"
)
# __keras_hyperopt_end__