Merge pull request #172 from ChristopherMayes/bugfixes
Bugfixes and log acquisition function implementation
roussel-ryan authored Nov 8, 2023
2 parents 5cacd30 + cdb12a9 commit a84bd29
Showing 7 changed files with 487 additions and 6 deletions.

Large diffs are not rendered by default; the remaining changed file, the new log-transformed optimization tutorial notebook, is omitted here.

1 change: 1 addition & 0 deletions mkdocs.yml
@@ -18,6 +18,7 @@ nav:
- Single Objective Bayesian Optimization:
- Bayesian optimization tutorial: examples/single_objective_bayes_opt/bo_tutorial.ipynb
- Constrained optimization: examples/single_objective_bayes_opt/constrained_bo_tutorial.ipynb
- Log-transformed optimization: examples/single_objective_bayes_opt/log_transformed_tutorial.ipynb
- Upper confidence bound: examples/single_objective_bayes_opt/upper_confidence_bound.ipynb
- Custom GP models: examples/single_objective_bayes_opt/custom_model.ipynb
- Trust region: examples/single_objective_bayes_opt/turbo_tutorial.ipynb
20 changes: 20 additions & 0 deletions tests/generators/bayesian/test_turbo.py
@@ -16,6 +16,11 @@
from xopt.resources.testing import TEST_VOCS_BASE, TEST_VOCS_DATA


def sin_function(input_dict):
x = input_dict["x"]
return {"f": -10 * np.exp(-((x - np.pi) ** 2) / 0.01) + 0.5 * np.sin(5 * x)}


class TestTurbo(TestCase):
def test_turbo_init(self):
test_vocs = deepcopy(TEST_VOCS_BASE)
@@ -279,3 +284,18 @@ def test_safety(self):
sturbo.update_state(test_data3, previous_batch_size=3)
assert sturbo.success_counter == 0
assert sturbo.failure_counter == 1

def test_serialization(self):
vocs = VOCS(
variables={"x": [0, 2 * math.pi]},
objectives={"f": "MINIMIZE"},
)

evaluator = Evaluator(function=sin_function)
for name in ["optimize", "safety"]:
generator = UpperConfidenceBoundGenerator(vocs=vocs, turbo_controller=name)
X = Xopt(evaluator=evaluator, generator=generator, vocs=vocs)

yaml_str = X.yaml()
X2 = Xopt.from_yaml(yaml_str)
assert X2.generator.turbo_controller.name == name
19 changes: 19 additions & 0 deletions xopt/generators/bayesian/bayesian_generator.py
@@ -23,6 +23,9 @@
from xopt.generators.bayesian.custom_botorch.constrained_acquisition import (
ConstrainedMCAcquisitionFunction,
)
from xopt.generators.bayesian.custom_botorch.log_acquisition_function import (
LogAcquisitionFunction,
)
from xopt.generators.bayesian.models.standard import StandardModelConstructor
from xopt.generators.bayesian.objectives import (
create_constraint_callables,
@@ -88,6 +91,10 @@ class BayesianGenerator(Generator, ABC):
computation_time : Optional[pd.DataFrame]
A data frame tracking computation time in seconds.
log_transform_acquisition_function : Optional[bool]
    Flag to determine if the final acquisition function value should be
    log-transformed before optimization.
n_candidates : int
The number of candidates to generate in each optimization step.
@@ -142,6 +149,11 @@ class BayesianGenerator(Generator, ABC):
None,
description="data frame tracking computation time in seconds",
)
log_transform_acquisition_function: Optional[bool] = Field(
False,
description="flag to log transform the acquisition function before optimization",
)

n_candidates: int = 1

@field_validator("model", mode="before")
@@ -218,6 +230,10 @@ def validate_turbo_controller(cls, value, info: ValidationInfo):
raise ValueError("turbo input dict needs to have a `name` attribute")
name = value.pop("name")
if name in optimizer_dict:
# pop unnecessary elements
for ele in ["dim"]:
value.pop(ele, None)

value = optimizer_dict[name](vocs=info.data["vocs"], **value)
else:
raise ValueError(
@@ -447,6 +463,9 @@ def get_acquisition(self, model):
acq_function=acq, d=dim, columns=columns, values=values
)

if self.log_transform_acquisition_function:
acq = LogAcquisitionFunction(acq)

return acq

def get_optimum(self):
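A usage sketch of the new flag (illustrative, not part of this commit; the generator class and vocs are assumed from the test file above):

# hypothetical sketch: the flag is an ordinary generator field, so it can be
# set at construction time or from a YAML config
generator = UpperConfidenceBoundGenerator(
    vocs=vocs,
    log_transform_acquisition_function=True,
)
# get_acquisition(model) now returns the acquisition function wrapped in
# LogAcquisitionFunction (the new file below)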
20 changes: 20 additions & 0 deletions xopt/generators/bayesian/custom_botorch/log_acquisition_function.py
@@ -0,0 +1,20 @@
import torch
from botorch.acquisition import AcquisitionFunction
from botorch.utils import t_batch_mode_transform
from torch import Tensor
from torch.nn import Module
from torch.nn.functional import softplus


class LogAcquisitionFunction(AcquisitionFunction):
def __init__(
self,
acq_function: AcquisitionFunction,
) -> None:
Module.__init__(self)
self.acq_func = acq_function

@t_batch_mode_transform(expected_q=1, assert_output_shape=False)
def forward(self, X: Tensor) -> Tensor:
# apply a softplus transform to avoid numerical gradient issues
return torch.log(softplus(self.acq_func(X), 20))
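Why the softplus: softplus(a, beta=20) = log(1 + exp(20 * a)) / 20 is a smooth, strictly positive surrogate for max(a, 0), so the outer log stays finite even where the raw acquisition value is zero or negative, and log(softplus(a, 20)) is essentially log(a) once a is comfortably positive. A standalone check (illustrative, not part of this commit):

import torch
from torch.nn.functional import softplus

a = torch.tensor([-1.0, 0.0, 0.5, 10.0])
print(torch.log(softplus(a, 20)))  # finite everywhere, ~= log(a) for larger a
print(torch.log(a))                # nan below zero, -inf at zero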
10 changes: 6 additions & 4 deletions xopt/generators/bayesian/turbo.py
@@ -1,7 +1,7 @@
import logging
import math
from abc import ABC, abstractmethod
-from typing import Dict, Union
+from typing import Dict, Optional, Union

import torch
from botorch.models import ModelListGP
@@ -46,7 +46,7 @@ class TurboController(XoptBaseModel, ABC):
None,
description="number of successes to trigger a trust region contraction",
)
-    center_x: Dict[str, float] = Field(None)
+    center_x: Optional[Dict[str, float]] = Field(None)
scale_factor: float = Field(
2.0, description="multiplier to increase or decrease trust region", gt=1.0
)
@@ -114,7 +114,8 @@ def update_state(self, data, previous_batch_size: int = 1) -> None:


class OptimizeTurboController(TurboController):
-    best_value: float = None
+    name: str = Field("optimize", frozen=True)
+    best_value: Optional[float] = None

@property
def minimize(self) -> bool:
@@ -200,7 +201,8 @@ def update_state(self, data: DataFrame, previous_batch_size: int = 1) -> None:


class SafetyTurboController(TurboController):
-    scale_factor: float = float(1.25)
+    name: str = Field("safety", frozen=True)
+    scale_factor: float = 1.25
min_feasible_fraction: float = 0.75

def update_state(self, data, previous_batch_size: int = 1):
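Combined with the validator change in bayesian_generator.py above, the new frozen name fields let a turbo controller be specified either by string or by dict, with stale keys such as dim from an old serialized dump silently dropped. A minimal sketch (illustrative; vocs as in the tests above, using only keys that appear in this diff):

# hypothetical sketch: both spellings resolve to OptimizeTurboController
gen = UpperConfidenceBoundGenerator(vocs=vocs, turbo_controller="optimize")
gen = UpperConfidenceBoundGenerator(
    vocs=vocs,
    turbo_controller={"name": "optimize", "scale_factor": 2.0, "dim": 1},  # "dim" is popped
)
assert gen.turbo_controller.name == "optimize"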
14 changes: 12 additions & 2 deletions xopt/pydantic.py
@@ -229,11 +229,21 @@ def from_file(cls, filename: str):

@classmethod
def from_yaml(cls, yaml_obj: [str, TextIO]):
-        return cls.model_validate(yaml.safe_load(yaml_obj))
+        return cls.model_validate(remove_none_values(yaml.safe_load(yaml_obj)))

@classmethod
def from_dict(cls, config: dict):
-        return cls.model_validate(config)
+        return cls.model_validate(remove_none_values(config))


def remove_none_values(d):
if isinstance(d, dict):
# Create a copy of the dictionary to avoid modifying the original while iterating
d = {k: remove_none_values(v) for k, v in d.items() if v is not None}
elif isinstance(d, list):
# If it's a list, recursively process each item in the list
d = [remove_none_values(item) for item in d]
return d


def get_descriptions_defaults(model: XoptBaseModel):
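For illustration (not part of this commit): remove_none_values recursively drops explicit nulls so that optional fields fall back to their pydantic defaults when an old YAML dump is re-loaded; this is the path the serialization test above exercises through from_yaml.

# hypothetical example of the helper's behavior
config = {
    "turbo_controller": {"name": "optimize", "center_x": None},
    "data": [{"step": 1, "note": None}],
}
print(remove_none_values(config))
# -> {'turbo_controller': {'name': 'optimize'}, 'data': [{'step': 1}]}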
