Add optimize_acqf_mixed_alternating to `fast_botorch_optimize_context_manager`

Summary: The previous diff adds the mixed optimizer to MBM. This diff adds it to the fast optimizer mocks, so `fast_botorch_optimize_context_manager` patches `optimize_acqf_mixed_alternating` with minimal optimization settings.

Differential Revision: D65067691
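How the new mock is expected to be exercised, as a minimal sketch (the context manager lives in ax.utils.testing.mock per the changed file; the test name and the generation helper below are hypothetical):

from ax.utils.testing.mock import fast_botorch_optimize_context_manager


def test_mixed_generation_is_fast() -> None:
    with fast_botorch_optimize_context_manager():
        # Any call that reaches
        # ax.models.torch.botorch_modular.acquisition.optimize_acqf_mixed_alternating
        # is routed through minimal_mixed_optimizer, which caps raw_samples=2,
        # num_restarts=1, and the maxiter_* options at 1 so the acquisition
        # optimization returns almost immediately.
        generate_candidates_for_mixed_search_space()  # hypothetical helper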
saitcakmak authored and facebook-github-bot committed Oct 28, 2024
1 parent ac25395 commit 5342a64
Showing 1 changed file with 23 additions and 0 deletions.
23 changes: 23 additions & 0 deletions ax/utils/testing/mock.py
@@ -17,6 +17,7 @@
    gen_batch_initial_conditions,
    gen_one_shot_kg_initial_conditions,
)
from botorch.optim.optimize_acqf_mixed import optimize_acqf_mixed_alternating
from scipy.optimize import OptimizeResult
from torch import Tensor

@@ -57,6 +58,19 @@ def minimal_gen_os_ics(*args: Any, **kwargs: Any) -> Tensor | None:
def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
    fit_fully_bayesian_model_nuts(*args, **_get_minimal_mcmc_kwargs(**kwargs))

def minimal_mixed_optimizer(*args: Any, **kwargs: Any) -> tuple[Tensor, Tensor]:
    kwargs["raw_samples"] = 2
    kwargs["num_restarts"] = 1
    kwargs["options"].update(
        {
            "maxiter_alternating": 1,
            "maxiter_continuous": 1,
            "maxiter_init": 1,
            "maxiter_discrete": 1,
        }
    )
    return optimize_acqf_mixed_alternating(*args, **kwargs)

with ExitStack() as es:
    mock_generation = es.enter_context(
        mock.patch(
@@ -93,6 +107,14 @@ def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
        )
    )

    mock_mixed_optimizer = es.enter_context(
        mock.patch(
            "ax.models.torch.botorch_modular.acquisition."
            "optimize_acqf_mixed_alternating",
            wraps=minimal_mixed_optimizer,
        )
    )

    yield

    if (not force) and all(
@@ -103,6 +125,7 @@ def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
            mock_gen_ics,
            mock_gen_os_ics,
            mock_mcmc_mbm,
            mock_mixed_optimizer,
        ]
    ):
        raise AssertionError(
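The guard that the new list entry joins (the `if (not force) and all(...)` block above) raises on exit when nothing inside the context called any of the wrapped optimizers or fitters. A hedged sketch of that behavior, assuming `force` is a keyword argument of the context manager (it appears as a free variable in the guard) and pytest as the test framework:

import pytest  # assumed test framework

from ax.utils.testing.mock import fast_botorch_optimize_context_manager


def test_unused_mocks_are_flagged() -> None:
    # No wrapped optimizer or fitter runs inside the block, so the
    # call-count guard fires when the context exits ...
    with pytest.raises(AssertionError):
        with fast_botorch_optimize_context_manager():
            pass
    # ... unless force=True suppresses the check.
    with fast_botorch_optimize_context_manager(force=True):
        pass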
