Description
I ran advanced-tour.ipynb from the top, as is, in both JupyterLab and Jupyter Notebook, but it gave me the following error at the maximize-loop step.
It seems that optimizer.suggest fails after an observation has been registered with optimizer.register.
Any idea what is wrong?
Thanks in advance.
This cell gives the error.
for _ in range(5):
    next_point = optimizer.suggest(utility)
    target = black_box_function(**next_point)
    optimizer.register(params=next_point, target=target)

    print(target, next_point)

print(optimizer.max)
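For context, the earlier cells of the notebook set up the optimizer roughly like this (reconstructed from my reading of advanced-tour.ipynb, so the exact bounds, kappa, and random seed are assumptions rather than verbatim):

from bayes_opt import BayesianOptimization, UtilityFunction

# Toy objective from the tour; maximum at x = 0, y = 1.
def black_box_function(x, y):
    return -x ** 2 - (y - 1) ** 2 + 1

# No objective is passed (f=None); points are evaluated manually
# through the suggest/register loop shown above.
optimizer = BayesianOptimization(
    f=None,
    pbounds={'x': (-2, 2), 'y': (-3, 3)},
    verbose=2,
    random_state=1,
)

# Acquisition function used by optimizer.suggest().
utility = UtilityFunction(kind="ucb", kappa=2.5, xi=0.0)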
Error message:
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-9-ff775cb8cb9c> in <module>
1 for _ in range(5):
----> 2 next_point = optimizer.suggest(utility)
3 target = black_box_function(**next_point)
4 optimizer.register(params=next_point, target=target)
5
~/.venv/venv1/lib/python3.7/site-packages/bayes_opt/bayesian_optimization.py in suggest(self, utility_function)
126 with warnings.catch_warnings():
127 warnings.simplefilter("ignore")
--> 128 self._gp.fit(self._space.params, self._space.target)
129
130 # Finding argmax of the acquisition function.
~/.venv/venv1/lib/python3.7/site-packages/sklearn/gaussian_process/_gpr.py in fit(self, X, y)
232 optima = [(self._constrained_optimization(obj_func,
233 self.kernel_.theta,
--> 234 self.kernel_.bounds))]
235
236 # Additional runs are performed from log-uniform chosen initial
~/.venv/venv1/lib/python3.7/site-packages/sklearn/gaussian_process/_gpr.py in _constrained_optimization(self, obj_func, initial_theta, bounds)
501 opt_res = scipy.optimize.minimize(
502 obj_func, initial_theta, method="L-BFGS-B", jac=True,
--> 503 bounds=bounds)
504 _check_optimize_result("lbfgs", opt_res)
505 theta_opt, func_min = opt_res.x, opt_res.fun
~/.venv/venv1/lib/python3.7/site-packages/scipy/optimize/_minimize.py in minimize(fun, x0, args, method, jac, hess, hessp, bounds, constraints, tol, callback, options)
608 elif meth == 'l-bfgs-b':
609 return _minimize_lbfgsb(fun, x0, args, jac, bounds,
--> 610 callback=callback, **options)
611 elif meth == 'tnc':
612 return _minimize_tnc(fun, x0, args, jac, bounds, callback=callback,
~/.venv/venv1/lib/python3.7/site-packages/scipy/optimize/lbfgsb.py in _minimize_lbfgsb(fun, x0, args, jac, bounds, disp, maxcor, ftol, gtol, eps, maxfun, maxiter, iprint, callback, maxls, **unknown_options)
343 # until the completion of the current minimization iteration.
344 # Overwrite f and g:
--> 345 f, g = func_and_grad(x)
346 elif task_str.startswith(b'NEW_X'):
347 # new iteration
~/.venv/venv1/lib/python3.7/site-packages/scipy/optimize/lbfgsb.py in func_and_grad(x)
293 else:
294 def func_and_grad(x):
--> 295 f = fun(x, *args)
296 g = jac(x, *args)
297 return f, g
~/.venv/venv1/lib/python3.7/site-packages/scipy/optimize/optimize.py in function_wrapper(*wrapper_args)
325 def function_wrapper(*wrapper_args):
326 ncalls[0] += 1
--> 327 return function(*(wrapper_args + args))
328
329 return ncalls, function_wrapper
~/.venv/venv1/lib/python3.7/site-packages/scipy/optimize/optimize.py in __call__(self, x, *args)
63 def __call__(self, x, *args):
64 self.x = numpy.asarray(x).copy()
---> 65 fg = self.fun(x, *args)
66 self.jac = fg[1]
67 return fg[0]
~/.venv/venv1/lib/python3.7/site-packages/sklearn/gaussian_process/_gpr.py in obj_func(theta, eval_gradient)
223 if eval_gradient:
224 lml, grad = self.log_marginal_likelihood(
--> 225 theta, eval_gradient=True, clone_kernel=False)
226 return -lml, -grad
227 else:
~/.venv/venv1/lib/python3.7/site-packages/sklearn/gaussian_process/_gpr.py in log_marginal_likelihood(self, theta, eval_gradient, clone_kernel)
474 y_train = y_train[:, np.newaxis]
475
--> 476 alpha = cho_solve((L, True), y_train) # Line 3
477
478 # Compute log-likelihood (compare line 7)
~/.venv/venv1/lib/python3.7/site-packages/scipy/linalg/decomp_cholesky.py in cho_solve(c_and_lower, b, overwrite_b, check_finite)
194 (c, lower) = c_and_lower
195 if check_finite:
--> 196 b1 = asarray_chkfinite(b)
197 c = asarray_chkfinite(c)
198 else:
~/.venv/venv1/lib/python3.7/site-packages/numpy/lib/function_base.py in asarray_chkfinite(a, dtype, order)
497 if a.dtype.char in typecodes['AllFloat'] and not np.isfinite(a).all():
498 raise ValueError(
--> 499 "array must not contain infs or NaNs")
500 return a
501
ValueError: array must not contain infs or NaNs
Here is the version information for my packages:
Python 3.7.6
bayesian-optimization 1.2.0
numpy 1.18.4
scipy 1.4.1
scikit-learn 0.23.0
joblib 0.15.1
threadpoolctl 2.0.0