6 changes: 2 additions & 4 deletions bayes_opt/__init__.py
@@ -5,10 +5,10 @@
import importlib.metadata

from bayes_opt import acquisition
from bayes_opt.bayesian_optimization import BayesianOptimization, Events
from bayes_opt.bayesian_optimization import BayesianOptimization
from bayes_opt.constraint import ConstraintModel
from bayes_opt.domain_reduction import SequentialDomainReductionTransformer
from bayes_opt.logger import JSONLogger, ScreenLogger
from bayes_opt.logger import ScreenLogger
from bayes_opt.target_space import TargetSpace

__version__ = importlib.metadata.version("bayesian-optimization")
@@ -19,8 +19,6 @@
"BayesianOptimization",
"TargetSpace",
"ConstraintModel",
"Events",
"ScreenLogger",
"JSONLogger",
"SequentialDomainReductionTransformer",
]
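
Note for downstream users: `Events` and `JSONLogger` are no longer exported, so code that wired loggers up through the observer API has to move to the optimizer's built-in `logger` attribute. A minimal before/after sketch, assuming the library's usual `f`/`pbounds` constructor arguments; the user code here is illustrative, not taken from this PR:

```python
# Before (hypothetical user code built on the removed exports):
#
#     from bayes_opt import BayesianOptimization, Events, JSONLogger
#     logger = JSONLogger(path="./optimization_log.log")
#     optimizer.subscribe(Events.OPTIMIZATION_STEP, logger)
#
# After: screen logging is owned by the optimizer itself.
from bayes_opt import BayesianOptimization, ScreenLogger

optimizer = BayesianOptimization(f=lambda x: -(x**2), pbounds={"x": (-2.0, 2.0)})

# The attached ScreenLogger is a plain attribute and can be swapped out.
optimizer.logger = ScreenLogger(verbose=1, is_constrained=optimizer.is_constrained)
optimizer.maximize(init_points=2, n_iter=5)
```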
80 changes: 35 additions & 45 deletions bayes_opt/bayesian_optimization.py
@@ -21,14 +21,13 @@
from bayes_opt import acquisition
from bayes_opt.constraint import ConstraintModel
from bayes_opt.domain_reduction import DomainTransformer
from bayes_opt.event import DEFAULT_EVENTS, Events
from bayes_opt.logger import _get_default_logger
from bayes_opt.logger import ScreenLogger
from bayes_opt.parameter import wrap_kernel
from bayes_opt.target_space import TargetSpace
from bayes_opt.util import ensure_rng

if TYPE_CHECKING:
from collections.abc import Callable, Iterable, Mapping
from collections.abc import Callable, Mapping

from numpy.random import RandomState
from numpy.typing import NDArray
@@ -41,35 +40,7 @@
Float = np.floating[Any]


class Observable:
"""Inspired by https://www.protechtraining.com/blog/post/879#simple-observer."""

def __init__(self, events: Iterable[Any]) -> None:
# maps event names to subscribers
# str -> dict
self._events = {event: dict() for event in events}

def get_subscribers(self, event: Any) -> Any:
"""Return the subscribers of an event."""
return self._events[event]

def subscribe(self, event: Any, subscriber: Any, callback: Callable[..., Any] | None = None) -> None:
"""Add subscriber to an event."""
if callback is None:
callback = subscriber.update
self.get_subscribers(event)[subscriber] = callback

def unsubscribe(self, event: Any, subscriber: Any) -> None:
"""Remove a subscriber for a particular event."""
del self.get_subscribers(event)[subscriber]

def dispatch(self, event: Any) -> None:
"""Trigger callbacks for subscribers of an event."""
for callback in self.get_subscribers(event).values():
callback(event, self)


class BayesianOptimization(Observable):
class BayesianOptimization:
"""Handle optimization of a target function over a specific target space.

This class takes the function to optimize as well as the parameters bounds
@@ -105,6 +76,27 @@ class BayesianOptimization(Observable):
This behavior may be desired in high noise situations where repeatedly probing
the same point will give different answers. In other situations, the acquisition
may occasionally generate a duplicate point.

Attributes
----------
space : TargetSpace
The target space object containing the function to optimize.

acquisition_function : AcquisitionFunction
The acquisition function used to propose new points.

constraint : ConstraintModel or None
The constraint model, if any.

max : dict or None
Maximum target value observed and corresponding parameters.

res : list
All target values observed and corresponding parameters and timestamps.

logger : ScreenLogger
The logger used for displaying optimization progress.
You can customize the logger's properties (e.g., colors, verbosity, formatting).
"""

def __init__(
@@ -173,7 +165,9 @@ def __init__(
self._bounds_transformer.initialize(self._space)

self._sorting_warning_already_shown = False # TODO: remove in future version
super().__init__(events=DEFAULT_EVENTS)

# Initialize logger
self.logger = ScreenLogger(verbose=self._verbose, is_constrained=self.is_constrained)

@property
def space(self) -> TargetSpace:
@@ -236,7 +230,7 @@ def register(
warn(msg, stacklevel=1)
self._sorting_warning_already_shown = True
self._space.register(params, target, constraint_value)
self.dispatch(Events.OPTIMIZATION_STEP)
self.logger.log_optimization_step(self)

def probe(self, params: ParamsType, lazy: bool = True) -> None:
"""Evaluate the function at the given points.
@@ -268,7 +262,7 @@ def probe(self, params: ParamsType, lazy: bool = True) -> None:
self._queue.append(params)
else:
self._space.probe(params)
self.dispatch(Events.OPTIMIZATION_STEP)
self.logger.log_optimization_step(self)

def suggest(self) -> dict[str, float | NDArray[Float]]:
"""Suggest a promising point to probe next."""
@@ -295,13 +289,6 @@ def _prime_queue(self, init_points: int) -> None:
sample = self._space.random_sample(random_state=self._random_state)
self._queue.append(self._space.array_to_params(sample))

def _prime_subscriptions(self) -> None:
if not any([len(subs) for subs in self._events.values()]):
_logger = _get_default_logger(self._verbose, self.is_constrained)
self.subscribe(Events.OPTIMIZATION_START, _logger)
self.subscribe(Events.OPTIMIZATION_STEP, _logger)
self.subscribe(Events.OPTIMIZATION_END, _logger)

def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
r"""
Maximize the given function over the target space.
@@ -324,8 +311,10 @@ def maximize(self, init_points: int = 5, n_iter: int = 25) -> None:
optimization routine, make sure to fit it manually, e.g. by calling
``optimizer._gp.fit(optimizer.space.params, optimizer.space.target)``.
"""
self._prime_subscriptions()
self.dispatch(Events.OPTIMIZATION_START)
# Log optimization start
self.logger.log_optimization_start(self)

# Prime the queue with random points
self._prime_queue(init_points)

iteration = 0
@@ -342,7 +331,8 @@
# the init_points points (only for the true iterations)
self.set_bounds(self._bounds_transformer.transform(self._space))

self.dispatch(Events.OPTIMIZATION_END)
# Log optimization end
self.logger.log_optimization_end(self)

def set_bounds(self, new_bounds: BoundsMapping) -> None:
"""Modify the bounds of the search space.
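
The `dispatch(Events...)` calls above become plain method calls on `self.logger`, and `register`/`probe` now log each step themselves. That also covers the manual `suggest`/`register` loop: a short sketch, assuming `f=None` is still accepted the way the library's advanced usage documents it:

```python
from bayes_opt import BayesianOptimization

def target(x: float) -> float:
    return -((x - 1.0) ** 2)

# f=None: evaluate externally and feed results back via register().
optimizer = BayesianOptimization(f=None, pbounds={"x": (-3.0, 3.0)}, verbose=2)

optimizer.logger.log_optimization_start(optimizer)  # prints the header
for _ in range(5):
    params = optimizer.suggest()
    # register() itself calls logger.log_optimization_step(), so each
    # observed point is printed without any event subscription.
    optimizer.register(params=params, target=target(**params))
optimizer.logger.log_optimization_end(optimizer)  # prints the closing rule
```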
17 changes: 0 additions & 17 deletions bayes_opt/event.py

This file was deleted.
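
For reference, the deleted module defined only the event names the observer machinery dispatched on. A reconstruction inferred from the identifiers imported elsewhere in this diff (`Events`, `DEFAULT_EVENTS`); the exact string values are an assumption:

```python
class Events:
    """Event names the optimizer used to dispatch on."""

    OPTIMIZATION_START = "optimization:start"
    OPTIMIZATION_STEP = "optimization:step"
    OPTIMIZATION_END = "optimization:end"


DEFAULT_EVENTS = [Events.OPTIMIZATION_START, Events.OPTIMIZATION_STEP, Events.OPTIMIZATION_END]
```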

150 changes: 58 additions & 92 deletions bayes_opt/logger.py
@@ -2,48 +2,18 @@

from __future__ import annotations

import json
from contextlib import suppress
from pathlib import Path
from datetime import datetime
from typing import TYPE_CHECKING, Any

import numpy as np
from colorama import Fore, just_fix_windows_console

from bayes_opt.event import Events
from bayes_opt.observer import _Tracker

if TYPE_CHECKING:
from os import PathLike

from bayes_opt.bayesian_optimization import BayesianOptimization

just_fix_windows_console()


def _get_default_logger(verbose: int, is_constrained: bool) -> ScreenLogger:
"""
Return the default logger.

Parameters
----------
verbose : int
Verbosity level of the logger.

is_constrained : bool
Whether the underlying optimizer uses constraints (this requires
an additional column in the output).

Returns
-------
ScreenLogger
The default logger.

"""
return ScreenLogger(verbose=verbose, is_constrained=is_constrained)


class ScreenLogger(_Tracker):
class ScreenLogger:
"""Logger that outputs text, e.g. to log to a terminal.

Parameters
@@ -66,7 +36,11 @@ def __init__(self, verbose: int = 2, is_constrained: bool = False) -> None:
self._verbose = verbose
self._is_constrained = is_constrained
self._header_length = None
super().__init__()
self._iterations = 0
self._previous_max = None
self._previous_max_params = None
self._start_time = None
self._previous_time = None

@property
def verbose(self) -> int:
@@ -221,84 +195,76 @@ def _is_new_max(self, instance: BayesianOptimization) -> bool:
self._previous_max = instance.max["target"]
return instance.max["target"] > self._previous_max

    def update(self, event: str, instance: BayesianOptimization) -> None:
        """Handle incoming events.

        Parameters
        ----------
        event : str
            One of the values associated with `Events.OPTIMIZATION_START`,
            `Events.OPTIMIZATION_STEP` or `Events.OPTIMIZATION_END`.

        instance : bayesian_optimization.BayesianOptimization
            The instance associated with the step.
        """
        line = ""
        if event == Events.OPTIMIZATION_START:
            line = self._header(instance) + "\n"
        elif event == Events.OPTIMIZATION_STEP:
            is_new_max = self._is_new_max(instance)
            if self._verbose != 1 or is_new_max:
                colour = self._colour_new_max if is_new_max else self._colour_regular_message
                line = self._step(instance, colour=colour) + "\n"
        elif event == Events.OPTIMIZATION_END:
            line = "=" * self._header_length + "\n"

        if self._verbose:
            print(line, end="")
        self._update_tracker(event, instance)

    def _update_tracker(self, instance: BayesianOptimization) -> None:
        """Update the tracker.

        Parameters
        ----------
        instance : bayesian_optimization.BayesianOptimization
            The instance associated with the step.
        """
        self._iterations += 1

        if instance.max is None:
            return

        current_max = instance.max

        if self._previous_max is None or current_max["target"] > self._previous_max:
            self._previous_max = current_max["target"]
            self._previous_max_params = current_max["params"]

    def _time_metrics(self) -> tuple[str, float, float]:
        """Return time passed since last call."""
        now = datetime.now()  # noqa: DTZ005
        if self._start_time is None:
            self._start_time = now
        if self._previous_time is None:
            self._previous_time = now

        time_elapsed = now - self._start_time
        time_delta = now - self._previous_time

        self._previous_time = now
        return (now.strftime("%Y-%m-%d %H:%M:%S"), time_elapsed.total_seconds(), time_delta.total_seconds())

    def log_optimization_start(self, instance: BayesianOptimization) -> None:
        """Log the start of the optimization process.

        Parameters
        ----------
        instance : BayesianOptimization
            The instance associated with the event.
        """
        if self._verbose:
            line = self._header(instance) + "\n"
            print(line, end="")

    def log_optimization_step(self, instance: BayesianOptimization) -> None:
        """Log an optimization step.

        Parameters
        ----------
        instance : BayesianOptimization
            The instance associated with the event.
        """
        is_new_max = self._is_new_max(instance)
        self._update_tracker(instance)

        if self._verbose != 1 or is_new_max:
            colour = self._colour_new_max if is_new_max else self._colour_regular_message
            line = self._step(instance, colour=colour) + "\n"
            if self._verbose:
                print(line, end="")

    def log_optimization_end(self, instance: BayesianOptimization) -> None:
        """Log the end of the optimization process.

        Parameters
        ----------
        instance : BayesianOptimization
            The instance associated with the event.
        """
        if self._verbose and self._header_length is not None:
            line = "=" * self._header_length + "\n"
            print(line, end="")


class JSONLogger(_Tracker):
    """
    Logger that outputs steps in JSON format.

    The resulting file can be used to restart the optimization from an earlier state.

    Parameters
    ----------
    path : str or os.PathLike
        Path to the file to write to.

    reset : bool
        Whether to overwrite the file if it already exists.

    """

    def __init__(self, path: str | PathLike[str], reset: bool = True):
        self._path = Path(path)
        if reset:
            with suppress(OSError):
                self._path.unlink(missing_ok=True)
        super().__init__()

    def update(self, event: str, instance: BayesianOptimization) -> None:
        """
        Handle incoming events.

        Parameters
        ----------
        event : str
            One of the values associated with `Events.OPTIMIZATION_START`,
            `Events.OPTIMIZATION_STEP` or `Events.OPTIMIZATION_END`.

        instance : bayesian_optimization.BayesianOptimization
            The instance associated with the step.
        """
        if event == Events.OPTIMIZATION_STEP:
            data = dict(instance.res[-1])

            now, time_elapsed, time_delta = self._time_metrics()
            data["datetime"] = {"datetime": now, "elapsed": time_elapsed, "delta": time_delta}

            if "allowed" in data:  # fix: github.com/fmfn/BayesianOptimization/issues/361
                data["allowed"] = bool(data["allowed"])

            if "constraint" in data and isinstance(data["constraint"], np.ndarray):
                data["constraint"] = data["constraint"].tolist()

            with self._path.open("a") as f:
                f.write(json.dumps(data) + "\n")

        self._update_tracker(event, instance)
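
`JSONLogger` is removed outright rather than ported to the new scheme, so persisting steps to disk is now left to user code. One way to recover it is to subclass `ScreenLogger` and append each observation after the standard step logging; `FileStepLogger` is a hypothetical name, and the JSON layout is simplified compared to what the old `JSONLogger` wrote (no timestamp block, numpy values stringified):

```python
import json
from pathlib import Path

from bayes_opt.logger import ScreenLogger


class FileStepLogger(ScreenLogger):
    """Screen logger that also appends each observed step to a JSON-lines file."""

    def __init__(self, path: str, verbose: int = 2, is_constrained: bool = False) -> None:
        super().__init__(verbose=verbose, is_constrained=is_constrained)
        self._path = Path(path)

    def log_optimization_step(self, instance) -> None:
        super().log_optimization_step(instance)  # keep screen output and tracker state
        data = dict(instance.res[-1])  # latest observation, as in the old JSONLogger
        # default=str crudely handles numpy scalars/arrays; the old JSONLogger
        # converted them explicitly instead.
        with self._path.open("a") as f:
            f.write(json.dumps(data, default=str) + "\n")
```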