maxim5 / hyper-engine

Python library for Bayesian hyper-parameters optimization

Home Page: https://pypi.python.org/pypi/hyperengine

Geek Repo:Geek Repo

Github PK Tool:Github PK Tool

Black box optimizer example

KOLANICH opened this issue · comments

from functools import partial

import hyperengine
import numpy as np

def rosenbrock(hyperparams):
	"""Rosenbrock-style test objective; global minimum 0 at x=1, y=1.

	`hyperparams` is a mapping with numeric entries "x" and "y".
	"""
	x = hyperparams["x"]
	y = hyperparams["y"]
	# (y - x^2)^2 == (x^2 - y)^2, so this matches the usual formulation.
	return (x - 1) ** 2 + 10 * (y - x ** 2) ** 2

class BlackBoxSolver:
	"""Adapts a zero-argument callable into a HyperSolver-compatible object.

	Every call to train() evaluates the wrapped function once, records the
	loss, and reports the best (minimum) loss seen so far.
	"""

	def __init__(self, func):
		# `func` must be callable with no arguments and return a scalar loss.
		self.func = func
		self._val_loss_curve = []

	def train(self):
		"""Evaluate the function once; return the running best loss."""
		self._val_loss_curve.append(self.func())
		return self._reducer(self._val_loss_curve)

	def _reducer(self, *args, **kwargs):
		# Collapses the loss history into one value — the minimum.
		return np.min(*args, **kwargs)

	def terminate(self):
		"""No resources to release for a plain function."""
		pass

def solver_generator(hyperparams):
	"""Solver factory for HyperTuner: binds `rosenbrock` to one
	hyper-parameter point and wraps it as a BlackBoxSolver.

	NOTE: `partial` was used without being imported in the original
	snippet (NameError at call time); it must come from functools.
	"""
	return BlackBoxSolver(partial(rosenbrock, hyperparams))

class IterLimitedHyperTuner(hyperengine.HyperTuner):
	"""HyperTuner wrapper that stops after a fixed number of evaluations.

	Works around the missing iteration limit in hyperengine: the solver
	factory raises StopIteration once `iterLimit` solvers have been
	created, and tune() catches it to end the (otherwise endless) loop.
	"""

	def __init__(self, hyper_params_spec, solver_generator, iterLimit, *args, **kwargs):
		# Invocation counter shared with the wrapped factory via closure.
		j=0
		def solver_generator_limited(hyperparams):
			# Delegates to the real factory until the limit is reached,
			# then aborts the tuning loop by raising StopIteration.
			nonlocal j
			if j<iterLimit:
				j+=1
				return solver_generator(hyperparams)
			else:
				raise StopIteration()
		
		super().__init__(hyper_params_spec, solver_generator_limited, *args, **kwargs)
	
	def tune(self):
		"""Run tuning; on hitting the limit, return the best point found.

		Returns a dict mapping spec keys to the coordinates of the point
		with the lowest recorded loss. If the base tune() returns on its
		own (limit never fires), this returns None.
		"""
		try:
			super().tune()
		except StopIteration as ex:
			# NOTE(review): relies on hyperengine internals —
			# self.strategy.values / .points and self.parsed._spec
			# (a private attribute); confirm against the installed
			# hyperengine version.
			minLossPointNum=np.argmin(self.strategy.values)
			return dict(zip(self.parsed._spec.keys(), self.strategy.points[minLossPointNum]))


# `hp` was never defined (only `import hyperengine` exists above); bind it
# to hyperengine's spec module, which provides new()/uniform().
# NOTE(review): confirm the spec API location against the installed
# hyperengine version.
hp = hyperengine.spec

# Search space: a 2-D box for the Rosenbrock test function.
spec = hp.new({
	"x": hp.uniform(-10, 10),
	"y": hp.uniform(-10, 10),
})
# Cap the (otherwise unbounded) search at 10 evaluations.
tuner = IterLimitedHyperTuner(spec, solver_generator, iterLimit=10, strategy='bayesian')  # alternative: 'portfolio'
tuner.tune()

@KOLANICH Can you describe this code please?

The lower part is a workaround for the missing iteration limit. The upper part is an adapter that turns any function into a HyperSolver-compatible object.