Pass scheduler_options to benchmark method getters (#1774)
Summary:
Pull Request resolved: #1774

This makes it easy to run benchmarks with `q>1`.

Reviewed By: esantorella

Differential Revision: D47484841

fbshipit-source-id: e494a7666ad5af032c8d8bb834e859f10c0a32f4
David Eriksson authored and facebook-github-bot committed Aug 11, 2023
1 parent 1cc89e9 commit 7fcdb3d
Showing 5 changed files with 69 additions and 22 deletions.
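The diffs below thread an optional scheduler_options argument through every benchmark method getter, falling back to get_sequential_optimization_scheduler_options() when none is given. A minimal sketch of how this can be used to run batched benchmarks follows; the specific SchedulerOptions fields and values (e.g. max_pending_trials=4) are illustrative assumptions rather than recommended settings, and the final benchmark_replication call is likewise only a sketch of typical usage.

from ax.benchmark.benchmark import benchmark_replication
from ax.benchmark.methods.gpei_and_moo import get_gpei_default
from ax.benchmark.problems.registry import get_problem
from ax.service.scheduler import SchedulerOptions

# Illustrative (assumed) options for running several trials concurrently,
# i.e. q > 1, instead of the strictly sequential benchmarking default.
batch_scheduler_options = SchedulerOptions(
    max_pending_trials=4,          # allow up to 4 trials in flight at once
    init_seconds_between_polls=0,  # poll synthetic trials without waiting
)

# The getters now accept scheduler_options; omitting it preserves the old
# sequential behavior via get_sequential_optimization_scheduler_options().
method = get_gpei_default(scheduler_options=batch_scheduler_options)

problem = get_problem(problem_name="ackley4")
result = benchmark_replication(problem=problem, method=method, seed=0)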
7 changes: 6 additions & 1 deletion ax/benchmark/methods/choose_generation_strategy.py
@@ -3,16 +3,20 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.

+from typing import Optional
+
 from ax.benchmark.benchmark_method import (
     BenchmarkMethod,
     get_sequential_optimization_scheduler_options,
 )
 from ax.benchmark.benchmark_problem import BenchmarkProblemBase
 from ax.modelbridge.dispatch_utils import choose_generation_strategy
+from ax.service.scheduler import SchedulerOptions


 def get_choose_generation_strategy_method(
     problem: BenchmarkProblemBase,
+    scheduler_options: Optional[SchedulerOptions] = None,
 ) -> BenchmarkMethod:
     generation_strategy = choose_generation_strategy(
         search_space=problem.search_space,
@@ -23,5 +27,6 @@ def get_choose_generation_strategy_method(
     return BenchmarkMethod(
         name=f"ChooseGenerationStrategy::{problem.name}",
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )
17 changes: 13 additions & 4 deletions ax/benchmark/methods/gpei_and_moo.py
@@ -3,15 +3,20 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.

+from typing import Optional
+
 from ax.benchmark.benchmark_method import (
     BenchmarkMethod,
     get_sequential_optimization_scheduler_options,
 )
 from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
 from ax.modelbridge.registry import Models
+from ax.service.scheduler import SchedulerOptions


-def get_gpei_default() -> BenchmarkMethod:
+def get_gpei_default(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name="SOBOL+GPEI::default",
         steps=[
@@ -27,11 +32,14 @@ def get_gpei_default() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_moo_default() -> BenchmarkMethod:
+def get_moo_default(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name="SOBOL+MOO::default",
         steps=[
@@ -47,5 +55,6 @@ def get_moo_default() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )
45 changes: 33 additions & 12 deletions ax/benchmark/methods/modular_botorch.py
@@ -12,6 +12,7 @@
 from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
 from ax.modelbridge.registry import Models
 from ax.models.torch.botorch_modular.surrogate import Surrogate
+from ax.service.scheduler import SchedulerOptions
 from ax.utils.common.constants import Keys
 from botorch.acquisition.acquisition import AcquisitionFunction
 from botorch.acquisition.monte_carlo import qNoisyExpectedImprovement
@@ -22,7 +23,9 @@
 from botorch.models.gp_regression import FixedNoiseGP


-def get_sobol_botorch_modular_fixed_noise_gp_qnei() -> BenchmarkMethod:
+def get_sobol_botorch_modular_fixed_noise_gp_qnei(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     model_gen_kwargs = {
         "model_gen_options": {
             Keys.OPTIMIZER_KWARGS: {
@@ -57,11 +60,14 @@ def get_sobol_botorch_modular_fixed_noise_gp_qnei() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_sobol_botorch_modular_fixed_noise_gp_qnehvi() -> BenchmarkMethod:
+def get_sobol_botorch_modular_fixed_noise_gp_qnehvi(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     model_gen_kwargs = {
         "model_gen_options": {
             Keys.OPTIMIZER_KWARGS: {
@@ -100,18 +106,24 @@ def get_sobol_botorch_modular_fixed_noise_gp_qnehvi() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp_qnei() -> BenchmarkMethod:  # noqa
+def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp_qnei(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:  # noqa
     return get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp(
-        qNoisyExpectedImprovement
+        qNoisyExpectedImprovement,
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


 def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp(
     botorch_acqf_class: Type[AcquisitionFunction],
+    scheduler_options: Optional[SchedulerOptions] = None,
 ) -> BenchmarkMethod:  # noqa
     generation_strategy = GenerationStrategy(
         name="SOBOL+BOTORCH_MODULAR::SaasFullyBayesianSingleTaskGP_"
@@ -135,11 +147,14 @@ def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp(
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp_qnehvi() -> BenchmarkMethod:  # noqa
+def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp_qnehvi(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:  # noqa
     generation_strategy = GenerationStrategy(
         name="SOBOL+BOTORCH_MODULAR::SaasFullyBayesianSingleTaskGP_qNoisyExpectedHypervolumeImprovement",  # noqa
         steps=[
@@ -164,11 +179,14 @@ def get_sobol_botorch_modular_saas_fully_bayesian_single_task_gp_qnehvi() -> BenchmarkMethod:  # noqa
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_sobol_botorch_modular_default() -> BenchmarkMethod:
+def get_sobol_botorch_modular_default(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name="SOBOL+BOTORCH_MODULAR::default",
         steps=[
@@ -184,13 +202,15 @@ def get_sobol_botorch_modular_default() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


 def get_sobol_botorch_modular_acquisition(
     acquisition_cls: Type[AcquisitionFunction],
     acquisition_options: Optional[Dict[str, Any]] = None,
+    scheduler_options: Optional[SchedulerOptions] = None,
 ) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name=f"SOBOL+BOTORCH_MODULAR::{acquisition_cls.__name__}",
@@ -215,5 +235,6 @@ def get_sobol_botorch_modular_acquisition(
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )
17 changes: 13 additions & 4 deletions ax/benchmark/methods/saasbo.py
@@ -3,15 +3,20 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.

+from typing import Optional
+
 from ax.benchmark.benchmark_method import (
     BenchmarkMethod,
     get_sequential_optimization_scheduler_options,
 )
 from ax.modelbridge.generation_strategy import GenerationStep, GenerationStrategy
 from ax.modelbridge.registry import Models
+from ax.service.scheduler import SchedulerOptions


-def get_saasbo_default() -> BenchmarkMethod:
+def get_saasbo_default(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name="SOBOL+FULLYBAYESIAN::default",
         steps=[
@@ -27,11 +32,14 @@ def get_saasbo_default() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )


-def get_saasbo_moo_default() -> BenchmarkMethod:
+def get_saasbo_moo_default(
+    scheduler_options: Optional[SchedulerOptions] = None,
+) -> BenchmarkMethod:
     generation_strategy = GenerationStrategy(
         name="SOBOL+FULLYBAYESIANMOO::default",
         steps=[
@@ -47,5 +55,6 @@ def get_saasbo_moo_default() -> BenchmarkMethod:
     return BenchmarkMethod(
         name=generation_strategy.name,
         generation_strategy=generation_strategy,
-        scheduler_options=get_sequential_optimization_scheduler_options(),
+        scheduler_options=scheduler_options
+        or get_sequential_optimization_scheduler_options(),
     )
5 changes: 4 additions & 1 deletion ax/benchmark/tests/test_methods.py
@@ -6,6 +6,7 @@
 import numpy as np

 from ax.benchmark.benchmark import benchmark_replication
+from ax.benchmark.benchmark_method import get_sequential_optimization_scheduler_options
 from ax.benchmark.methods.modular_botorch import get_sobol_botorch_modular_acquisition
 from ax.benchmark.problems.registry import get_problem
 from ax.modelbridge.registry import Models
@@ -17,6 +18,7 @@
 class TestMethods(TestCase):
     def test_mbm_acquisition(self) -> None:
         method = get_sobol_botorch_modular_acquisition(
+            scheduler_options=get_sequential_optimization_scheduler_options(),
             acquisition_cls=qKnowledgeGradient,
             acquisition_options={"num_fantasies": 16},
         )
@@ -33,7 +35,8 @@ def test_mbm_acquisition(self) -> None:
     def test_benchmark_replication_runs(self) -> None:
         problem = get_problem(problem_name="ackley4")
         method = get_sobol_botorch_modular_acquisition(
-            acquisition_cls=qKnowledgeGradient
+            scheduler_options=get_sequential_optimization_scheduler_options(),
+            acquisition_cls=qKnowledgeGradient,
         )
         n_sobol_trials = method.generation_strategy._steps[0].num_trials
         # Only run one non-Sobol trial
