Commit

Adding Desirability functions as objectives (#497)
* initial commit of numerical objectives. Needs to be adjusted, tested etc.

* moved torch functions to torch_tools.py

* removed torch dependencies from data-model

* added validators and tests for desirability data-models

* after hooks

* added test for "get_objective_callable"

* after hooks

* added tutorial notebook desirability_objectives.ipynb

* after hooks

* added to AnyRealObjective

* after hooks

* changed validators to model validators

* added type: Literals to objectives

* after hooks

* after hooks

* debugged new validators

* after hooks

* fixed tests

* after hooks

* got rid of using desirability base class as actual usable class

* - moved clip to abstract class
- desirability base class inherits from _SeriesNumpyCallable
- deleted pyright_output.txt

* added abstractmethod decorator

* after hooks

* changed data model specs for tests

* changed bounds defs in specs to lists

* debugged invalid specs definition

* after hooks

* moved helper class with __call__ method to abstract desirability class

* after hooks
LukasHebing authored Jan 21, 2025
1 parent c96a566 commit 4d6d6c6
Showing 6 changed files with 620 additions and 1 deletion.
21 changes: 20 additions & 1 deletion bofire/data_models/objectives/api.py
@@ -1,6 +1,12 @@
from typing import Union

from bofire.data_models.objectives.categorical import ConstrainedCategoricalObjective
from bofire.data_models.objectives.desirabilities import (
    DecreasingDesirabilityObjective,
    DesirabilityObjective,
    IncreasingDesirabilityObjective,
    PeakDesirabilityObjective,
)
from bofire.data_models.objectives.identity import (
    IdentityObjective,
    MaximizeObjective,
@@ -25,6 +31,7 @@
    IdentityObjective,
    SigmoidObjective,
    ConstrainedObjective,
    DesirabilityObjective,
]

AnyCategoricalObjective = ConstrainedCategoricalObjective
@@ -36,7 +43,15 @@
    TargetObjective,
]

AnyRealObjective = Union[MaximizeObjective, MinimizeObjective, CloseToTargetObjective]
AnyRealObjective = Union[
    MaximizeObjective,
    MinimizeObjective,
    CloseToTargetObjective,
    DesirabilityObjective,
    IncreasingDesirabilityObjective,
    DecreasingDesirabilityObjective,
    PeakDesirabilityObjective,
]

AnyObjective = Union[
    MaximizeObjective,
@@ -47,4 +62,8 @@
    CloseToTargetObjective,
    ConstrainedCategoricalObjective,
    MovingMaximizeSigmoidObjective,
    DesirabilityObjective,
    IncreasingDesirabilityObjective,
    DecreasingDesirabilityObjective,
    PeakDesirabilityObjective,
]
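
With these unions extended, the new objectives can be serialized and re-validated wherever an AnyRealObjective or AnyObjective is accepted. A minimal sketch of what that enables follows; it is not part of the commit, and the constructor fields and defaults (bounds, peak_position, w) are assumptions taken from the docstrings in desirabilities.py further down:

from pydantic import TypeAdapter

from bofire.data_models.objectives.api import (
    AnyRealObjective,
    PeakDesirabilityObjective,
)

# Hypothetical round trip: dump a desirability objective and re-validate it
# against the extended union; the "type" Literal field selects the right class.
obj = PeakDesirabilityObjective(bounds=(0.0, 1.0), peak_position=0.3)
restored = TypeAdapter(AnyRealObjective).validate_python(obj.model_dump())
assert isinstance(restored, PeakDesirabilityObjective)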
221 changes: 221 additions & 0 deletions bofire/data_models/objectives/desirabilities.py
@@ -0,0 +1,221 @@
from abc import abstractmethod
from typing import Literal, Optional, Union

import numpy as np
import pandas as pd
import pydantic

from bofire.data_models.objectives.identity import IdentityObjective


class DesirabilityObjective(IdentityObjective):
    """Abstract class for desirability objectives. Works as Identity Objective."""

    type: Literal["DesirabilityObjective"] = "DesirabilityObjective"  # type: ignore
    clip: bool = True

    @pydantic.model_validator(mode="after")
    def validate_clip(self):
        if self.clip:
            return self

        log_shapes = {
            key: val
            for (key, val) in self.__dict__.items()
            if key.startswith("log_shape_factor")
        }
        for key, log_shape_ in log_shapes.items():
            if log_shape_ != 0:
                raise ValueError(
                    f"Log shape factor {key} must be zero if clip is False."
                )
        return self

    def __call__(
        self, x: Union[pd.Series, np.ndarray], x_adapt
    ) -> Union[pd.Series, np.ndarray]:
        """Wrapper function to call numpy and torch functions with pandas Series or
        numpy arrays. Matches the __call__ signature of objectives."""

        convert_to_series = False
        if isinstance(x, pd.Series):
            convert_to_series = True
            name = x.name
            x = x.values

        y = self.call_numpy(x)

        if convert_to_series:
            return pd.Series(y, name=name)

        return y

(GitHub Actions lint annotations on lines 43, 44, and 49 of this file: pyright reports that members "name" and "values" are unknown for type "ndarray", and that "name" is possibly unbound.)

    @abstractmethod
    def call_numpy(self, x: np.ndarray) -> np.ndarray:
        raise NotImplementedError()


class IncreasingDesirabilityObjective(DesirabilityObjective):
    """An objective returning as reward the scaled identity, trimmed at the bounds:

        d = ((x - lower_bound) / (upper_bound - lower_bound))^t

    where t = exp(log_shape_factor). If clip is True, the reward is zero for
    x < lower_bound and one for x > upper_bound. Note that with clipping the reward
    is always between zero and one.

    Attributes:
        clip (bool): Whether to clip the values below/above the lower/upper bound, by
            default True.
        log_shape_factor (float): Logarithm of the shape factor:
            Whether the interpolation between the lower bound and the upper bound is
            linear (=0), convex (>0) or concave (<0), by default 0.0.
        w (float): relative weight, by default = 1.
        bounds (tuple[float]): lower and upper bound of the desirability. Below
            bounds[0] the desirability is =0 (if clip=True) or <0 (if clip=False). Above
            bounds[1] the desirability is =1 (if clip=True) or >1 (if clip=False).
            Defaults to (0, 1).
    """

    type: Literal["IncreasingDesirabilityObjective"] = "IncreasingDesirabilityObjective"  # type: ignore
    log_shape_factor: float = 0.0

    def call_numpy(
        self,
        x: np.ndarray,
        x_adapt: Optional[Union[pd.Series, np.ndarray]] = None,
    ) -> np.ndarray:
        y = np.zeros(x.shape)
        if self.clip:
            y[x < self.lower_bound] = 0.0
            y[x > self.upper_bound] = 1.0
            between = (x >= self.lower_bound) & (x <= self.upper_bound)
        else:
            between = np.full(x.shape, True)

        t = np.exp(self.log_shape_factor)

        y[between] = np.power(
            (x[between] - self.lower_bound) / (self.upper_bound - self.lower_bound), t
        )

        return y


class DecreasingDesirabilityObjective(DesirabilityObjective):
    """An objective returning as reward the negative, shifted scaled identity, trimmed
    at the bounds:

        d = ((upper_bound - x) / (upper_bound - lower_bound))^t

    where t = exp(log_shape_factor). Note that with clipping the reward is always
    between zero and one.

    Attributes:
        clip (bool): Whether to clip the values below/above the lower/upper bound, by
            default True.
        log_shape_factor (float): Logarithm of the shape factor:
            Whether the interpolation between the lower bound and the upper bound is
            linear (=0), convex (>0) or concave (<0), by default 0.0.
        w (float): relative weight, by default = 1.
        bounds (tuple[float]): lower and upper bound of the desirability. Below
            bounds[0] the desirability is =1 (if clip=True) or >1 (if clip=False). Above
            bounds[1] the desirability is =0 (if clip=True) or <0 (if clip=False).
            Defaults to (0, 1).
    """

    type: Literal["DecreasingDesirabilityObjective"] = "DecreasingDesirabilityObjective"  # type: ignore
    log_shape_factor: float = 0.0

    def call_numpy(
        self,
        x: np.ndarray,
        x_adapt: Optional[Union[pd.Series, np.ndarray]] = None,
    ) -> np.ndarray:
        y = np.zeros(x.shape)
        if self.clip:
            y[x < self.lower_bound] = 1.0
            y[x > self.upper_bound] = 0.0
            between = (x >= self.lower_bound) & (x <= self.upper_bound)
        else:
            between = np.full(x.shape, True)

        t = np.exp(self.log_shape_factor)

        y[between] = np.power(
            (self.upper_bound - x[between]) / (self.upper_bound - self.lower_bound), t
        )

        return y


class PeakDesirabilityObjective(DesirabilityObjective):
    """A piecewise (linear or convex/concave) objective that increases from the lower
    bound to the peak position and decreases from the peak position to the upper bound.

    Attributes:
        clip (bool): Whether to clip the values below/above the lower/upper bound, by
            default True.
        log_shape_factor (float): Logarithm of the shape factor for the increasing part:
            Whether the interpolation between the lower bound and the peak is linear
            (=0), convex (>0) or concave (<0), by default 0.0.
        log_shape_factor_decreasing (float): Logarithm of the shape factor for the
            decreasing part. Whether the interpolation between the peak and the upper
            bound is linear (=0), convex (>0) or concave (<0), by default 0.0.
        peak_position (float): Position of the peak, by default 0.5.
        w (float): relative weight; the desirability at x = peak_position, by default = 1.
        bounds (tuple[float]): lower and upper bound of the desirability. Below
            bounds[0] the desirability is =0 (if clip=True) or <0 (if clip=False). Above
            bounds[1] the desirability is =0 (if clip=True) or <0 (if clip=False).
            Defaults to (0, 1).
    """

    type: Literal["PeakDesirabilityObjective"] = "PeakDesirabilityObjective"  # type: ignore
    log_shape_factor: float = 0.0
    log_shape_factor_decreasing: float = 0.0  # often named log_t
    peak_position: float = 0.5  # often named T

    def call_numpy(
        self,
        x: np.ndarray,
        x_adapt: Optional[Union[pd.Series, np.ndarray]] = None,
    ) -> np.ndarray:
        y = np.zeros(x.shape)
        if self.clip:
            Incr = (x >= self.lower_bound) & (x <= self.peak_position)
            Decr = (x <= self.upper_bound) & (x > self.peak_position)
        else:
            Incr, Decr = x <= self.peak_position, x > self.peak_position

        s: float = np.exp(self.log_shape_factor)
        t: float = np.exp(self.log_shape_factor_decreasing)
        y[Incr] = np.power(
            np.divide(
                (x[Incr] - self.lower_bound), (self.peak_position - self.lower_bound)
            ),
            s,
        )
        y[Decr] = np.power(
            np.divide(
                (x[Decr] - self.upper_bound), (self.peak_position - self.upper_bound)
            ),
            t,
        )

        return y * self.w

    @pydantic.model_validator(mode="after")
    def validate_peak_position(self):
        bounds = self.bounds
        if self.peak_position < bounds[0] or self.peak_position > bounds[1]:
            raise ValueError(
                f"Peak position must be within bounds {bounds}, got {self.peak_position}"
            )
        return self
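
For orientation, here is a hedged usage sketch of the data models above. It is not part of the commit; the constructor fields and defaults (bounds, w, log_shape_factor, peak_position) are assumed from the docstrings, and the printed values follow from d = ((x - lower_bound) / (upper_bound - lower_bound))^t with t = exp(0) = 1:

import pandas as pd

from bofire.data_models.objectives.desirabilities import (
    IncreasingDesirabilityObjective,
    PeakDesirabilityObjective,
)

# Linear ramp from 0 at x=0 to 1 at x=10 (log_shape_factor=0, so t=1).
incr = IncreasingDesirabilityObjective(bounds=(0.0, 10.0))
x = pd.Series([-5.0, 2.5, 10.0, 20.0], name="y1")
print(incr(x, None))  # approx [0.00, 0.25, 1.00, 1.00] with clip=True

# Triangular desirability: 0 at the bounds, 1 at the peak, linear in between.
peak = PeakDesirabilityObjective(bounds=(0.0, 10.0), peak_position=3.0)
print(peak(x, None))  # approx [0.00, 0.83, 0.00, 0.00]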
83 changes: 83 additions & 0 deletions bofire/utils/torch_tools.py
@@ -21,12 +21,15 @@
    CloseToTargetObjective,
    ConstrainedCategoricalObjective,
    ConstrainedObjective,
    DecreasingDesirabilityObjective,
    IncreasingDesirabilityObjective,
    MaximizeObjective,
    MaximizeSigmoidObjective,
    MinimizeObjective,
    MinimizeSigmoidObjective,
    MovingMaximizeSigmoidObjective,
    Objective,
    PeakDesirabilityObjective,
    TargetObjective,
)
from bofire.strategies.strategy import Strategy
@@ -431,6 +434,86 @@ def get_objective_callable(
)
)
)

    if isinstance(objective, IncreasingDesirabilityObjective):

        def objective_callable_(x: Tensor, *args) -> Tensor:
            x = x[..., idx]

            y = torch.zeros(x.shape, dtype=x.dtype, device=x.device)
            if objective.clip:
                y[x < objective.lower_bound] = 0.0
                y[x > objective.upper_bound] = 1.0
                between = (x >= objective.lower_bound) & (x <= objective.upper_bound)
            else:
                between = torch.full(x.shape, True, dtype=torch.bool, device=x.device)

            t: float = np.exp(objective.log_shape_factor)

            y[between] = torch.pow(
                (x[between] - objective.lower_bound)
                / (objective.upper_bound - objective.lower_bound),
                t,
            )
            return y

        return objective_callable_

    if isinstance(objective, DecreasingDesirabilityObjective):

        def objective_callable_(x: Tensor, *args) -> Tensor:
            x = x[..., idx]

            y = torch.zeros(x.shape, dtype=x.dtype, device=x.device)
            if objective.clip:
                y[x < objective.lower_bound] = 1.0
                y[x > objective.upper_bound] = 0.0
                between = (x >= objective.lower_bound) & (x <= objective.upper_bound)
            else:
                between = torch.full(x.shape, True, dtype=torch.bool, device=x.device)

            t: float = np.exp(objective.log_shape_factor)
            y[between] = torch.pow(
                (objective.upper_bound - x[between])
                / (objective.upper_bound - objective.lower_bound),
                t,
            )
            return y

        return objective_callable_

    if isinstance(objective, PeakDesirabilityObjective):

        def objective_callable_(x: Tensor, *args) -> Tensor:
            x = x[..., idx]
            y = torch.zeros(x.shape, dtype=x.dtype, device=x.device)

            if objective.clip:
                Incr = (x >= objective.lower_bound) & (x <= objective.peak_position)
                Decr = (x <= objective.upper_bound) & (x > objective.peak_position)
            else:
                Incr, Decr = x <= objective.peak_position, x > objective.peak_position

            s: float = np.exp(objective.log_shape_factor)
            t: float = np.exp(objective.log_shape_factor_decreasing)
            y[Incr] = torch.pow(
                torch.divide(
                    (x[Incr] - objective.lower_bound),
                    (objective.peak_position - objective.lower_bound),
                ),
                s,
            )
            y[Decr] = torch.pow(
                torch.divide(
                    (x[Decr] - objective.upper_bound),
                    (objective.peak_position - objective.upper_bound),
                ),
                t,
            )
            return y * objective.w

        return objective_callable_

    raise NotImplementedError(
        f"Objective {objective.__class__.__name__} not implemented.",
    )
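
A hedged sketch of exercising the new branch follows. The full signature of get_objective_callable is not visible in this diff, so passing idx and objective as keyword arguments is an assumption based on the closure over idx above:

import torch

from bofire.data_models.objectives.desirabilities import IncreasingDesirabilityObjective
from bofire.utils.torch_tools import get_objective_callable

objective = IncreasingDesirabilityObjective(bounds=(0.0, 10.0))
# Assumed call pattern; check the actual signature of get_objective_callable.
callable_ = get_objective_callable(idx=0, objective=objective)

# One candidate column; the callable slices x[..., idx] internally.
X = torch.tensor([[2.5], [10.0], [12.0]], dtype=torch.float64)
print(callable_(X))  # approx tensor([0.25, 1.00, 1.00]) with clip=True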

0 comments on commit 4d6d6c6
