Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor ACQF Optimization #535

Draft
wants to merge 33 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
33 commits
Select commit Hold shift + click to select a range
5d3f792
refactorings and thoughts
jduerholt Mar 4, 2025
adc7092
added structure for data-models for Optimizers
LukasHebing Mar 7, 2025
4aea808
after hooks
LukasHebing Mar 7, 2025
8087f0d
moved optimizer-dependent data-model fields, and validators
LukasHebing Mar 7, 2025
652dd0a
after hooks
LukasHebing Mar 7, 2025
389402d
moved descriptor/categorical/discrete method field from botorch strat…
LukasHebing Mar 7, 2025
475d50f
after hooks
LukasHebing Mar 7, 2025
9bf473d
moved data-model tests
LukasHebing Mar 7, 2025
1c4cadb
renamed appearances of num... to n...
LukasHebing Mar 7, 2025
29d8b42
data-model tests run
LukasHebing Mar 7, 2025
dbd98a1
after hooks
LukasHebing Mar 7, 2025
7a9a5e3
included Optimizer mapping in mapper_actual.py
LukasHebing Mar 7, 2025
bf04ba8
initialized optimizer in strategy __init__, using mapper function
LukasHebing Mar 7, 2025
d027dba
moved methods, WIP changing input structures
LukasHebing Mar 7, 2025
3f5c817
WIP: moving methods to Optimizer
LukasHebing Mar 7, 2025
36e7b21
WIP
LukasHebing Mar 7, 2025
7825d69
WIP: changing method signatures
LukasHebing Mar 10, 2025
a565033
after hooks
LukasHebing Mar 10, 2025
3ee0dbe
some debugging
LukasHebing Mar 10, 2025
6d4b777
after hooks
LukasHebing Mar 10, 2025
4d8984f
adapted (some) tests to acq. optimizer data model call
LukasHebing Mar 10, 2025
709355b
added testfile for optimizer
LukasHebing Mar 11, 2025
1a0aada
added strategy mapping for benchmark fixes
LukasHebing Mar 11, 2025
b247cc0
debugged: missing domain,... args in function calls
LukasHebing Mar 11, 2025
0fa332d
after hooks
LukasHebing Mar 11, 2025
5781d66
moved discrete optimization to Optimizer base class
LukasHebing Mar 12, 2025
60d9e46
added categoric search to optimizer base-class
LukasHebing Mar 13, 2025
f08f9ad
added test for all-categoric optimization (optimize_acqf_discrete)
LukasHebing Mar 13, 2025
909809b
after hooks
LukasHebing Mar 13, 2025
f61236c
removed commented test-cases
LukasHebing Mar 13, 2025
5a0c1fd
added type-annotations
LukasHebing Mar 13, 2025
76e9af4
changed method "is_constraint_implemented" from classmethod to conven…
LukasHebing Mar 13, 2025
30ce1e8
after hooks
LukasHebing Mar 13, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 11 additions & 2 deletions bofire/data_models/strategies/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,15 @@
FractionalFactorialStrategy,
)
from bofire.data_models.strategies.meta_strategy_type import MetaStrategy
from bofire.data_models.strategies.predictives.acqf_optimization import (
LSRBO,
AcquisitionOptimizer,
BotorchOptimizer,
)
from bofire.data_models.strategies.predictives.active_learning import (
ActiveLearningStrategy,
)
from bofire.data_models.strategies.predictives.botorch import LSRBO, BotorchStrategy
from bofire.data_models.strategies.predictives.botorch import BotorchStrategy
from bofire.data_models.strategies.predictives.enting import EntingStrategy
from bofire.data_models.strategies.predictives.mobo import MoboStrategy
from bofire.data_models.strategies.predictives.multi_fidelity import (
Expand Down Expand Up @@ -76,5 +81,9 @@
MoboStrategy,
]


AnyLocalSearchConfig = LSRBO

AnyAcqfOptimizer = Union[
AcquisitionOptimizer,
BotorchOptimizer,
]
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/doe.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,7 @@ class DoEStrategy(Strategy):
verbose: bool = False # get rid of this at a later stage
ipopt_options: Optional[Dict] = None

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
return True

@classmethod
Expand Down
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/factorial.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,7 @@ class FactorialStrategy(Strategy):

type: Literal["FactorialStrategy"] = "FactorialStrategy" # type: ignore

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
return False

@classmethod
Expand Down
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/fractional_factorial.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,8 +51,7 @@ class FractionalFactorialStrategy(Strategy):
description="If true, the run order is randomized, else it is deterministic.",
)

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
return False

@classmethod
Expand Down
108 changes: 108 additions & 0 deletions bofire/data_models/strategies/predictives/acqf_optimization.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,108 @@
from abc import abstractmethod
from typing import Annotated, Literal, Optional, Type

from pydantic import Field, PositiveInt, field_validator

from bofire.data_models.base import BaseModel
from bofire.data_models.constraints import api as constraints
from bofire.data_models.enum import CategoricalMethodEnum
from bofire.data_models.types import IntPowerOfTwo


class AcquisitionOptimizer(BaseModel):
    """Base data model for acquisition function optimizers.

    Concrete optimizers (e.g. `BotorchOptimizer`) subclass this model and
    add their own hyperparameters and constraint-support checks.
    """

    # If True, domains containing only categorical/discrete features are
    # optimized by exhaustively enumerating the candidate space instead of
    # running a continuous optimizer.
    prefer_exhaustive_search_for_purely_categorical_domains: bool = True

    def is_constraint_implemented(self, my_type: Type[constraints.Constraint]) -> bool:
        """Checks if a constraint type is supported by this optimizer.

        The base implementation accepts every constraint type; subclasses
        override this to restrict support.

        Args:
            my_type (Type[Constraint]): The type of the constraint.

        Returns:
            bool: True if the constraint is implemented, False otherwise.

        """
        return True


class LocalSearchConfig(BaseModel):
    """Base data model for local-search configurations.

    A LocalSearchConfig decides, per candidate, whether the acquisition
    function should be optimized locally inside the local reference bounds
    or globally inside the full bounds of the domain.
    """

    # discriminator used by pydantic to resolve the concrete config class
    type: str

    @abstractmethod
    def is_local_step(self, acqf_local: float, acqf_global: float) -> bool:
        """Decide between a local and a global step.

        Args:
            acqf_local (float): Acquisition value of the local optimum.
            acqf_global (float): Acquisition value of the global optimum.

        Returns:
            bool: True to take the local step, False to take a step
                towards the global acqf maximum.

        """


class LSRBO(LocalSearchConfig):
    """LSRBO implements the local search region method published in
    https://www.merl.com/publications/docs/TR2023-057.pdf

    A local step is taken whenever the local acqf value reaches at least
    `gamma`; otherwise a step towards the global acqf maximum is made.

    Attributes:
        gamma (float): The switching parameter between local and global optimization.
            Defaults to 0.1.

    """

    type: Literal["LSRBO"] = "LSRBO"
    # non-negative switching threshold on the local acqf value
    gamma: Annotated[float, Field(ge=0)] = 0.1

    def is_local_step(self, acqf_local: float, acqf_global: float) -> bool:
        # local step iff the local optimum's acqf value reaches the threshold;
        # note acqf_global is intentionally unused by this strategy
        return acqf_local >= self.gamma


AnyLocalSearchConfig = LSRBO


class BotorchOptimizer(AcquisitionOptimizer):
    """Acquisition optimizer data model backed by botorch's `optimize_acqf` machinery.

    Attributes:
        n_restarts: Number of restarts from different starting points.
        n_raw_samples: Number of raw samples used to seed the restarts
            (must be a power of two).
        maxiter: Maximum number of optimizer iterations per restart.
        batch_limit: Number of restarts optimized jointly per batch;
            defaults to and is capped at `n_restarts`.
        descriptor_method: How descriptor features are treated during optimization.
        categorical_method: How categorical features are treated during optimization.
        discrete_method: How discrete features are treated during optimization.
        local_search_config: Optional config for switching between local and
            global acqf optimization (e.g. LSRBO).
    """

    n_restarts: PositiveInt = 8
    n_raw_samples: IntPowerOfTwo = 1024
    maxiter: PositiveInt = 2000
    batch_limit: Optional[PositiveInt] = Field(default=None, validate_default=True)

    # encoding params
    descriptor_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
    categorical_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
    discrete_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE

    # local search region params
    local_search_config: Optional[AnyLocalSearchConfig] = None

    @field_validator("batch_limit")
    @classmethod
    def validate_batch_limit(cls, batch_limit: Optional[int], info):
        """Default `batch_limit` to `n_restarts` and cap it at `n_restarts`."""
        n_restarts = info.data.get("n_restarts")
        if n_restarts is None:
            # `n_restarts` failed its own validation, so it is absent from
            # info.data; return unchanged and let pydantic report that error
            # instead of raising an opaque KeyError here.
            return batch_limit
        return min(batch_limit or n_restarts, n_restarts)

    def is_constraint_implemented(self, my_type: Type[constraints.Constraint]) -> bool:
        """Method to check if a specific constraint type is implemented for this optimizer.

        Args:
            my_type (Type[Constraint]): Constraint class

        Returns:
            bool: True if the constraint type is valid for the optimizer chosen, False otherwise

        """
        # botorch's gradient-based optimization cannot handle nonlinear constraints
        if my_type in [
            constraints.NonlinearInequalityConstraint,
            constraints.NonlinearEqualityConstraint,
        ]:
            return False
        return True
137 changes: 42 additions & 95 deletions bofire/data_models/strategies/predictives/botorch.py
Original file line number Diff line number Diff line change
@@ -1,16 +1,12 @@
import warnings
from abc import abstractmethod
from typing import Annotated, Literal, Optional, Type
from typing import Annotated, Optional, Type

from pydantic import Field, PositiveInt, field_validator, model_validator
from pydantic import Field, PositiveInt, model_validator

from bofire.data_models.base import BaseModel
from bofire.data_models.constraints.api import (
Constraint,
InterpointConstraint,
LinearConstraint,
NonlinearEqualityConstraint,
NonlinearInequalityConstraint,
)
from bofire.data_models.domain.api import Domain, Outputs
from bofire.data_models.enum import CategoricalEncodingEnum, CategoricalMethodEnum
Expand All @@ -21,6 +17,10 @@
TaskInput,
)
from bofire.data_models.outlier_detection.api import OutlierDetections
from bofire.data_models.strategies.predictives.acqf_optimization import (
AcquisitionOptimizer,
BotorchOptimizer,
)
from bofire.data_models.strategies.predictives.predictive import PredictiveStrategy
from bofire.data_models.strategies.shortest_path import has_local_search_region
from bofire.data_models.surrogates.api import (
Expand All @@ -29,61 +29,14 @@
MultiTaskGPSurrogate,
SingleTaskGPSurrogate,
)
from bofire.data_models.types import IntPowerOfTwo


class LocalSearchConfig(BaseModel):
"""LocalSearchConfigs provide a way to define how to switch between global
acqf optimization in the global bounds and local acqf optimization in the local
reference bounds.
"""

type: str

@abstractmethod
def is_local_step(self, acqf_local: float, acqf_global: float) -> bool:
"""Abstract switching function between local and global acqf optimum.

Args:
acqf_local (float): Local acqf value.
acqf_global (float): Global acqf value.

Returns:
bool: If true, do local step, else a step towards the global acqf maximum.

"""


class LSRBO(LocalSearchConfig):
"""LSRBO implements the local search region method published in.
https://www.merl.com/publications/docs/TR2023-057.pdf

Attributes:
gamma (float): The switsching parameter between local and global optimization.
Defaults to 0.1.

"""

type: Literal["LSRBO"] = "LSRBO"
gamma: Annotated[float, Field(ge=0)] = 0.1

def is_local_step(self, acqf_local: float, acqf_global: float) -> bool:
return acqf_local >= self.gamma


AnyLocalSearchConfig = LSRBO


class BotorchStrategy(PredictiveStrategy):
# acqf optimizer params
num_restarts: PositiveInt = 8
num_raw_samples: IntPowerOfTwo = 1024
maxiter: PositiveInt = 2000
batch_limit: Optional[PositiveInt] = Field(default=None, validate_default=True)
# encoding params
descriptor_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
categorical_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
discrete_method: CategoricalMethodEnum = CategoricalMethodEnum.EXHAUSTIVE
# acquisition optimizer
acquisition_optimizer: AcquisitionOptimizer = Field(
default_factory=lambda: BotorchOptimizer()
)

surrogate_specs: BotorchSurrogates = Field(
default_factory=lambda: BotorchSurrogates(surrogates=[]),
validate_default=True,
Expand All @@ -95,21 +48,13 @@
# hyperopt params
frequency_hyperopt: Annotated[int, Field(ge=0)] = 0 # 0 indicates no hyperopt
folds: int = 5
# local search region params
local_search_config: Optional[AnyLocalSearchConfig] = None

@field_validator("batch_limit")
@classmethod
def validate_batch_limit(cls, batch_limit: int, info):
batch_limit = min(
batch_limit or info.data["num_restarts"],
info.data["num_restarts"],
)
return batch_limit

@model_validator(mode="after")
def validate_local_search_config(self):
if self.local_search_config is not None:
if not isinstance(self.acquisition_optimizer, BotorchOptimizer):
return self

if self.acquisition_optimizer.local_search_config is not None:
if has_local_search_region(self.domain) is False:
warnings.warn(
"`local_search_region` config is specified, but no local search region is defined in `domain`",
Expand All @@ -122,9 +67,9 @@
raise ValueError("LSR-BO only supported for linear constraints.")
return self

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy. For optimizer-specific
strategies, this is passed to the optimizer check.

Args:
my_type (Type[Constraint]): Constraint class
Expand All @@ -133,9 +78,7 @@
bool: True if the constraint type is valid for the strategy chosen, False otherwise

"""
if my_type in [NonlinearInequalityConstraint, NonlinearEqualityConstraint]:
return False
return True
return self.acquisition_optimizer.is_constraint_implemented(my_type)

@model_validator(mode="after")
def validate_interpoint_constraints(self):
Expand All @@ -158,27 +101,31 @@
# categorical_method = (
# values["categorical_method"] if "categorical_method" in values else None
# )
if self.categorical_method == CategoricalMethodEnum.FREE:
for m in self.surrogate_specs.surrogates:
if isinstance(m, MixedSingleTaskGPSurrogate):
raise ValueError(
"Categorical method FREE not compatible with a a MixedSingleTaskGPModel.",
)
# we also check that if a categorical with descriptor method is used as one hot encoded the same method is
# used for the descriptor as for the categoricals
for m in self.surrogate_specs.surrogates:
keys = m.inputs.get_keys(CategoricalDescriptorInput)
for k in keys:
input_proc_specs = (
m.input_preprocessing_specs[k]
if k in m.input_preprocessing_specs
else None
)
if input_proc_specs == CategoricalEncodingEnum.ONE_HOT:
if self.categorical_method != self.descriptor_method:
if isinstance(self.acquisition_optimizer, BotorchOptimizer):
if (
self.acquisition_optimizer.categorical_method
== CategoricalMethodEnum.FREE
):
for m in self.surrogate_specs.surrogates:
if isinstance(m, MixedSingleTaskGPSurrogate):
raise ValueError(
"One-hot encoded CategoricalDescriptorInput features has to be treated with the same method as categoricals.",
"Categorical method FREE not compatible with a a MixedSingleTaskGPModel.",
)
# we also check that if a categorical with descriptor method is used as one hot encoded the same method is
# used for the descriptor as for the categoricals
for m in self.surrogate_specs.surrogates:
keys = m.inputs.get_keys(CategoricalDescriptorInput)
for k in keys:
input_proc_specs = (
m.input_preprocessing_specs[k]
if k in m.input_preprocessing_specs
else None
)
if input_proc_specs == CategoricalEncodingEnum.ONE_HOT:
if self.categorical_method != self.descriptor_method:

Check failure on line 125 in bofire/data_models/strategies/predictives/botorch.py

View workflow job for this annotation

GitHub Actions / types

Cannot access member "descriptor_method" for type "BotorchStrategy"   Member "descriptor_method" is unknown (reportGeneralTypeIssues)

Check failure on line 125 in bofire/data_models/strategies/predictives/botorch.py

View workflow job for this annotation

GitHub Actions / types

Cannot access member "categorical_method" for type "BotorchStrategy"   Member "categorical_method" is unknown (reportGeneralTypeIssues)
raise ValueError(
"One-hot encoded CategoricalDescriptorInput features has to be treated with the same method as categoricals.",
)
return self

@model_validator(mode="after")
Expand Down
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/predictives/enting.py
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,7 @@ class EntingStrategy(PredictiveStrategy):
# a value of zero implies future observations will be exactly the mean
kappa_fantasy: float = 1.96

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
return my_type in [
LinearEqualityConstraint,
LinearInequalityConstraint,
Expand Down
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/predictives/qparego.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,8 +47,7 @@ def is_objective_implemented(cls, my_type: Type[Objective]) -> bool:
return False
return True

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
"""Method to check if a specific constraint type is implemented for the strategy

Args:
Expand Down
3 changes: 1 addition & 2 deletions bofire/data_models/strategies/random.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,7 @@ class RandomStrategy(Strategy):
num_base_samples: Optional[Annotated[int, Field(gt=0)]] = None
max_iters: Annotated[int, Field(gt=0)] = 1000

@classmethod
def is_constraint_implemented(cls, my_type: Type[Constraint]) -> bool:
def is_constraint_implemented(self, my_type: Type[Constraint]) -> bool:
return my_type in [
LinearInequalityConstraint,
LinearEqualityConstraint,
Expand Down
Loading
Loading