Probabilities of Feasibility for Classifier-Based Constraints in Acquisition Functions #2776

Draft · wants to merge 1 commit into base: main
19 changes: 18 additions & 1 deletion botorch/acquisition/monte_carlo.py
@@ -47,7 +47,10 @@
from botorch.exceptions.warnings import legacy_ei_numerics_warning
from botorch.models.model import Model
from botorch.sampling.base import MCSampler
from botorch.utils.objective import compute_smoothed_feasibility_indicator
from botorch.utils.objective import (
compute_probabilities_of_feasibility_indicator,
compute_smoothed_feasibility_indicator,
)
from botorch.utils.transforms import (
concatenate_pending_points,
match_batch_shape,
@@ -188,6 +191,7 @@ def __init__(
sample_reduction: SampleReductionProtocol = torch.mean,
q_reduction: SampleReductionProtocol = torch.amax,
constraints: list[Callable[[Tensor], Tensor]] | None = None,
probabilities_of_feasibility: list[Callable[[Tensor], Tensor]] | None = None,
eta: Tensor | float = 1e-3,
fat: bool = False,
):
@@ -248,6 +252,7 @@ def __init__(
self._sample_reduction = partial(sample_reduction, dim=sample_dim)
self._q_reduction = partial(q_reduction, dim=-1)
self._constraints = constraints
self._probabilities_of_feasibility = probabilities_of_feasibility
self._eta = eta
self._fat = fat

@@ -328,6 +333,18 @@ def _apply_constraints(self, acqval: Tensor, samples: Tensor) -> Tensor:
fat=self._fat,
)
acqval = acqval.add(ind) if self._log else acqval.mul(ind)
if self._probabilities_of_feasibility is not None:
if not self._log and (acqval < 0).any():
raise ValueError(
"Constraint-weighting requires unconstrained "
"acquisition values to be non-negative."
)
ind = compute_probabilities_of_feasibility_indicator(
probabilities_of_feasibility=self._probabilities_of_feasibility,
samples=samples,
log=self._log,
)
acqval = acqval.add(ind) if self._log else acqval.mul(ind)
return acqval
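
For orientation, the new branch above weights the unconstrained acquisition value by the product of the per-callable probabilities of feasibility (or, in log space, adds their summed log-probabilities). Below is a minimal sketch of that weighting in plain PyTorch, assuming each callable maps posterior samples of shape sample_shape x batch_shape x q x m to per-point probabilities in (0, 1); the sigmoid-based callables are hypothetical stand-ins for trained feasibility classifiers and are not part of this diff.

import torch
from torch import Tensor

def prob_feasible_a(samples: Tensor) -> Tensor:
    # Hypothetical stand-in for a feasibility classifier: maps
    # sample_shape x batch_shape x q x m samples to probabilities in (0, 1).
    return torch.sigmoid(samples[..., 0])

def prob_feasible_b(samples: Tensor) -> Tensor:
    return torch.sigmoid(samples[..., -1])

samples = torch.randn(8, 2, 3, 2)  # sample_shape=8, batch_shape=2, q=3, m=2
acqval = torch.rand(8, 2, 3)       # non-negative unconstrained acquisition values

# Non-log path: multiply by the product of the probabilities of feasibility.
ind = torch.ones_like(acqval)
for prob in (prob_feasible_a, prob_feasible_b):
    ind = ind * prob(samples)
weighted = acqval * ind

# Log path: add the summed log-probabilities to the log acquisition values.
log_ind = sum(prob(samples).log() for prob in (prob_feasible_a, prob_feasible_b))
log_weighted = acqval.log() + log_ind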


12 changes: 12 additions & 0 deletions botorch/utils/objective.py
@@ -180,6 +180,18 @@ def compute_smoothed_feasibility_indicator(
return is_feasible if log else is_feasible.exp()


def compute_probabilities_of_feasibility_indicator(
probabilities_of_feasibility: list[Callable[[Tensor], Tensor]],
samples: Tensor,
log: bool = False,
) -> Tensor:
is_feasible = torch.zeros_like(samples[..., 0])
for constraint in probabilities_of_feasibility:
# each callable returns probabilities in [0, 1]: 1 means feasible, 0 means infeasible
is_feasible = is_feasible + constraint(samples).log()
Comment on lines +189 to +191 (Contributor):
"constraint" is probably not the right variable name here

return is_feasible if log else is_feasible.exp()


def apply_constraints(
obj: Tensor,
constraints: list[Callable[[Tensor], Tensor]],
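
A quick exercise of the new helper on its own, assuming this draft lands with the probabilities_of_feasibility keyword used at the call site in monte_carlo.py; the toy callables are illustrative only. With log=False the helper returns the product of the probabilities, and with log=True the corresponding sum of log-probabilities.

import torch
from botorch.utils.objective import compute_probabilities_of_feasibility_indicator

samples = torch.randn(4, 5, 2)  # sample_shape=4, q=5, m=2

probs = [
    lambda Z: torch.sigmoid(Z[..., 0]),         # toy classifier on the first output
    lambda Z: torch.full_like(Z[..., 0], 0.9),  # constant 90% probability of feasibility
]

ind = compute_probabilities_of_feasibility_indicator(
    probabilities_of_feasibility=probs, samples=samples, log=False
)
log_ind = compute_probabilities_of_feasibility_indicator(
    probabilities_of_feasibility=probs, samples=samples, log=True
)
assert torch.allclose(ind, log_ind.exp())  # product of probabilities vs. exp(sum of logs)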