From 5342a6413dec2a9215adbb693bf40636fb81a1cd Mon Sep 17 00:00:00 2001
From: Sait Cakmak
Date: Mon, 28 Oct 2024 11:03:48 -0700
Subject: [PATCH] Add `optimize_acqf_mixed_alternating` to
 `fast_botorch_optimize_context_manager`

Summary: The previous diff adds mixed optimizer to MBM. This diff adds it to
fast optimizer mocks.

Differential Revision: D65067691
---
 ax/utils/testing/mock.py | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/ax/utils/testing/mock.py b/ax/utils/testing/mock.py
index d4183162838..1d8d28aefe4 100644
--- a/ax/utils/testing/mock.py
+++ b/ax/utils/testing/mock.py
@@ -17,6 +17,7 @@
     gen_batch_initial_conditions,
     gen_one_shot_kg_initial_conditions,
 )
+from botorch.optim.optimize_acqf_mixed import optimize_acqf_mixed_alternating
 from scipy.optimize import OptimizeResult
 from torch import Tensor
 
@@ -57,6 +58,19 @@ def minimal_gen_os_ics(*args: Any, **kwargs: Any) -> Tensor | None:
     def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
         fit_fully_bayesian_model_nuts(*args, **_get_minimal_mcmc_kwargs(**kwargs))
 
+    def minimal_mixed_optimizer(*args: Any, **kwargs: Any) -> tuple[Tensor, Tensor]:
+        kwargs["raw_samples"] = 2
+        kwargs["num_restarts"] = 1
+        kwargs["options"].update(
+            {
+                "maxiter_alternating": 1,
+                "maxiter_continuous": 1,
+                "maxiter_init": 1,
+                "maxiter_discrete": 1,
+            }
+        )
+        return optimize_acqf_mixed_alternating(*args, **kwargs)
+
     with ExitStack() as es:
         mock_generation = es.enter_context(
             mock.patch(
@@ -93,6 +107,14 @@ def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
             )
         )
 
+        mock_mixed_optimizer = es.enter_context(
+            mock.patch(
+                "ax.models.torch.botorch_modular.acquisition."
+                "optimize_acqf_mixed_alternating",
+                wraps=minimal_mixed_optimizer,
+            )
+        )
+
         yield
 
         if (not force) and all(
@@ -103,6 +125,7 @@ def minimal_fit_fully_bayesian(*args: Any, **kwargs: Any) -> None:
                 mock_gen_ics,
                 mock_gen_os_ics,
                 mock_mcmc_mbm,
+                mock_mixed_optimizer,
             ]
         ):
             raise AssertionError(