Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 0 additions & 26 deletions bayes_opt/bayesian_optimization.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
from os import PathLike
from pathlib import Path
from typing import TYPE_CHECKING, Any
from warnings import warn

import numpy as np
from scipy.optimize import NonlinearConstraint
Expand Down Expand Up @@ -138,8 +137,6 @@ def __init__(
raise TypeError(msg)
self._bounds_transformer.initialize(self._space)

self._sorting_warning_already_shown = False # TODO: remove in future version

# Initialize logger
self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained)

Expand Down Expand Up @@ -278,17 +275,6 @@ def register(
constraint_value: float or None
Value of the constraint function at the observation, if any.
"""
# TODO: remove in future version
if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
msg = (
"You're attempting to register an np.ndarray. In previous versions, the optimizer internally"
" sorted parameters by key and expected any registered array to respect this order."
" In the current and any future version the order as given by the pbounds dictionary will be"
" used. If you wish to retain sorted parameters, please manually sort your pbounds"
" dictionary before constructing the optimizer."
)
warn(msg, stacklevel=1)
self._sorting_warning_already_shown = True
self._space.register(params, target, constraint_value)
self.logger.log_optimization_step(
self._space.keys, self._space.res()[-1], self._space.params_config, self.max
Expand All @@ -308,18 +294,6 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
If True, the optimizer will evaluate the points when calling
maximize(). Otherwise it will evaluate it at the moment.
"""
# TODO: remove in future version
if isinstance(params, np.ndarray) and not self._sorting_warning_already_shown:
msg = (
"You're attempting to register an np.ndarray. In previous versions, the optimizer internally"
" sorted parameters by key and expected any registered array to respect this order."
" In the current and any future version the order as given by the pbounds dictionary will be"
" used. If you wish to retain sorted parameters, please manually sort your pbounds"
" dictionary before constructing the optimizer."
)
warn(msg, stacklevel=1)
self._sorting_warning_already_shown = True
params = self._space.array_to_params(params)
if lazy:
self._queue.append(params)
else:
Expand Down
23 changes: 23 additions & 0 deletions tests/test_bayesian_optimization.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import pickle
import warnings
from pathlib import Path

import numpy as np
Expand Down Expand Up @@ -97,6 +98,28 @@ def test_register():
optimizer.register(params={"p1": 5, "p2": 4}, target=9)


def test_register_array_uses_pbounds_order_without_warning():
    """Registering an np.ndarray maps values onto pbounds keys in declaration order, warning-free."""
    opt = BayesianOptimization(target_func, {"p1": (0, 10), "p2": (0, 10)}, random_state=1)

    with warnings.catch_warnings(record=True) as records:
        # Force every warning to surface so a stray deprecation warning would be captured.
        warnings.simplefilter("always")
        opt.register(params=np.array([1, 2]), target=3)

    assert not records
    assert opt.space.array_to_params(opt.space.params[0]) == {"p1": 1.0, "p2": 2.0}


def test_probe_array_uses_pbounds_order_without_warning():
    """Probing with an np.ndarray maps values onto pbounds keys in declaration order, warning-free."""
    opt = BayesianOptimization(target_func, {"p1": (0, 10), "p2": (0, 10)}, random_state=1)

    with warnings.catch_warnings(record=True) as records:
        # Force every warning to surface so a stray deprecation warning would be captured.
        warnings.simplefilter("always")
        # lazy=False evaluates immediately, so the observation lands in the space now.
        opt.probe(params=np.array([1, 2]), lazy=False)

    assert not records
    assert opt.space.array_to_params(opt.space.params[0]) == {"p1": 1.0, "p2": 2.0}


def test_probe_lazy():
optimizer = BayesianOptimization(target_func, PBOUNDS, random_state=1)

Expand Down