# !pip install ray[tune]
# !pip install hyperopt==0.2.5

import time

import ray
from ray import tune
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.suggest.hyperopt import HyperOptSearch
from hyperopt import hp


def evaluate(step, width, height):
    time.sleep(0.1)
    return (0.1 + width * step / 100) ** (-1) + height * 0.1


def objective(config):
    for step in range(config["steps"]):
        score = evaluate(step, config["width"], config["height"])
        tune.report(iterations=step, mean_loss=score)


ray.init(configure_logging=False)

# Seed the search with two known starting points. Each point must assign a
# value to every sampled hyperparameter, including "activation".
initial_params = [
    {"width": 1, "height": 2, "activation": "relu"},
    {"width": 4, "height": 2, "activation": "tanh"},
]
algo = HyperOptSearch(points_to_evaluate=initial_params)
# Cap the number of trials running at the same time.
algo = ConcurrencyLimiter(algo, max_concurrent=4)

num_samples = 1000
# If 1000 samples take too long, you can reduce this number.
# We override this number here for our smoke tests.
num_samples = 10

search_config = {
    "steps": 100,
    "width": tune.uniform(0, 20),
    "height": tune.uniform(-100, 100),
    "activation": tune.choice(["relu", "tanh"]),
}

analysis = tune.run(
    objective,
    search_alg=algo,
    metric="mean_loss",
    mode="min",
    name="hyperopt_exp",
    num_samples=num_samples,
    config=search_config,
)

print("Best hyperparameters found were: ", analysis.best_config)


def evaluation_fn(step, width, height, mult=1):
    return (0.1 + width * step / 100) ** (-1) + height * 0.1 * mult


def objective_two(config):
    width, height = config["width"], config["height"]
    # With a conditional space, config["activation"] is a sub-dictionary that
    # only contains "mult" when the "relu" branch was sampled.
    sub_dict = config["activation"]
    mult = sub_dict.get("mult", 1)
    for step in range(config["steps"]):
        intermediate_score = evaluation_fn(step, width, height, mult)
        tune.report(iterations=step, mean_loss=intermediate_score)
        time.sleep(0.1)


# A conditional search space in HyperOpt's native format: "mult" is sampled
# only when "relu" is chosen as the activation.
conditional_space = {
    "activation": hp.choice(
        "activation",
        [
            {"activation": "relu", "mult": hp.uniform("mult", 1, 2)},
            {"activation": "tanh"},
        ],
    ),
    "width": hp.uniform("width", 0, 20),
    "height": hp.uniform("height", -100, 100),
    "steps": 100,
}

# Because the native space is passed to the searcher directly, no config is
# passed to tune.run below.
algo = HyperOptSearch(space=conditional_space, metric="mean_loss", mode="min")
algo = ConcurrencyLimiter(algo, max_concurrent=4)

analysis = tune.run(
    objective_two,
    metric="mean_loss",
    mode="min",
    search_alg=algo,
    num_samples=num_samples,
)

print("Best hyperparameters found were: ", analysis.best_config)

ray.shutdown()
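
# A minimal sketch of inspecting the results one level deeper (an assumption
# about Ray 1.x's ExperimentAnalysis API, not part of the original example):
# `best_result` holds the last metrics reported by the best trial, so the
# score behind `best_config` can be printed alongside it. This only reads
# driver-side data, so it works even after ray.shutdown().
best = analysis.best_result  # dict with keys such as "mean_loss" and "iterations"
print("Best mean_loss was: ", best["mean_loss"])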