Added support for arrays, callbacks, and cheap_constraints in nevergrad
AaronYoung5 committed Feb 17, 2025
1 parent 0f03eb6 commit b1c0f41
Showing 5 changed files with 199 additions and 36 deletions.
45 changes: 45 additions & 0 deletions plugins/hydra_nevergrad_sweeper/example/config.yaml
@@ -12,6 +12,43 @@ hydra:
# number of parallel workers for performing function evaluations
num_workers: 10
# maximize: true # uncomment to maximize instead of minimize

# Uncomment to load the dump and resume a failed run.
# load_if_exists: nevergrad.pkl

callbacks:
# Add a parameters logger callback.
parameters_logger:
name: tell
callback:
_target_: nevergrad.callbacks.ParametersLogger

# NOTE: logs will always overwrite the previous logs.
filepath: ${hydra.sweep.dir}/nevergrad.log
append: False

# Add an optimizer dump callback. We can load the dump to resume a failed run using `load_if_exists`.
optimizer_dump:
name: tell
callback:
_target_: nevergrad.callbacks.OptimizerDump

# NOTE: dumps will always overwrite the previous dumps.
filepath: ${hydra.sweep.dir}/nevergrad.pkl

# Add a progress bar callback.
progress_bar:
name: tell
callback:
_target_: nevergrad.callbacks.ProgressBar

# Cheap constraints prune the search space _before_ a parameterization is evaluated.
cheap_constraints:
lr_constraint:
_target_: __main__.lr_constraint_fn
_partial_: true
max_lr: 2.0

# default parametrization of the search space
parametrization:
# either one or the other
@@ -35,11 +72,19 @@ hydra:
lower: 4
upper: 16
integer: true
# an array of optimizable values
# alternatively, set shape: [3] (instead of init) to optimize 3 values
arr:
init: [0.1, 0.2, 0.3]
lower: 0.0
upper: 1.0


db: cifar
lr: 0.01
dropout: 0.6
batch_size: 8
arr: [0.1, 0.2, 0.3]

# if true, simulate a failure by raising an exception
error: false
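
The cheap_constraints entry above is resolved by Hydra before the sweep starts. Below is a rough sketch of what it boils down to, assuming Hydra's `_partial_: true` semantics (instantiation yields a functools.partial with max_lr bound); the import path is illustrative, since the example config targets __main__.lr_constraint_fn.

from functools import partial

from my_app import lr_constraint_fn  # illustrative import path

# Hydra resolves the cheap_constraints entry to roughly this callable.
constraint = partial(lr_constraint_fn, max_lr=2.0)

# Nevergrad calls the constraint with a candidate's value dict before the
# candidate is ever launched; False means the candidate is pruned.
print(constraint({"lr": 0.5}))   # True  -> candidate kept
print(constraint({"lr": 10.0}))  # False -> candidate pruned
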
24 changes: 22 additions & 2 deletions plugins/hydra_nevergrad_sweeper/example/my_app.py
@@ -1,25 +1,45 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import logging
from typing import Any, Dict

import hydra
from omegaconf import DictConfig

log = logging.getLogger(__name__)


def lr_constraint_fn(
parameterization: Dict[str, Any],
/,
*,
max_lr: float,
) -> bool:
"""This function is used to prune experiments for nevergrad sweepers. Returns
False if the experiment should be pruned, True otherwise.
"""

return parameterization["lr"] < max_lr


@hydra.main(version_base=None, config_path=".", config_name="config")
def dummy_training(cfg: DictConfig) -> float:
"""A dummy function to minimize
Minimum is 0.0 at:
lr = 0.12, dropout=0.33, db=cifar, batch_size=4, arr=[0, 0, 0]
"""
print(cfg.arr)
print(sum(cfg.arr))
do = cfg.dropout
bs = cfg.batch_size
out = float(
abs(do - 0.33) + int(cfg.db == "mnist") + abs(cfg.lr - 0.12) + abs(bs - 4)
abs(do - 0.33)
+ int(cfg.db == "mnist")
+ abs(cfg.lr - 0.12)
+ abs(bs - 4)
+ sum(cfg.arr)
)
log.info(
f"dummy_training(dropout={do:.3f}, lr={cfg.lr:.3f}, db={cfg.db}, batch_size={bs}) = {out:.3f}",
f"dummy_training(dropout={do:.3f}, lr={cfg.lr:.3f}, db={cfg.db}, batch_size={bs}, arr={cfg.arr}) = {out:.3f}",
)
if cfg.error:
raise RuntimeError("cfg.error is True")
plugins/hydra_nevergrad_sweeper/hydra_plugins/hydra_nevergrad_sweeper/_impl.py
@@ -10,8 +10,10 @@
Optional,
Tuple,
Union,
Callable,
)

import numpy as np
import nevergrad as ng
from hydra.core import utils
from hydra.core.override_parser.overrides_parser import OverridesParser
@@ -25,36 +27,42 @@
from hydra.plugins.launcher import Launcher
from hydra.plugins.sweeper import Sweeper
from hydra.types import HydraContext, TaskFunction
from omegaconf import DictConfig, ListConfig, OmegaConf
from omegaconf import DictConfig, OmegaConf

from .config import OptimConf, ScalarConfigSpec
from .config import OptimConf, ScalarOrArrayConfigSpec, CheapConstraintFn

log = logging.getLogger(__name__)


def create_nevergrad_param_from_config(
config: Union[MutableSequence[Any], MutableMapping[str, Any]]
) -> Any:
if OmegaConf.is_config(config):
config = OmegaConf.to_container(config, resolve=True)
if isinstance(config, MutableSequence):
if isinstance(config, ListConfig):
config = OmegaConf.to_container(config, resolve=True) # type: ignore
return ng.p.Choice(config)
if isinstance(config, MutableMapping):
specs = ScalarConfigSpec(**config)
specs = ScalarOrArrayConfigSpec(**config)
init = ["init", "lower", "upper"]
init_params = {x: getattr(specs, x) for x in init}
if not specs.log:
scalar = ng.p.Scalar(**init_params)

if specs.shape or isinstance(init_params["init"], list):
if specs.shape:
init_params["shape"] = specs.shape
parameter = ng.p.Array(**init_params)
if specs.step is not None:
parameter.set_mutation(sigma=specs.step)
elif not specs.log:
parameter = ng.p.Scalar(**init_params)
if specs.step is not None:
scalar.set_mutation(sigma=specs.step)
parameter.set_mutation(sigma=specs.step)
else:
if specs.step is not None:
init_params["exponent"] = specs.step
scalar = ng.p.Log(**init_params)
parameter = ng.p.Log(**init_params)
if specs.integer:
scalar.set_integer_casting()
return scalar
return config
parameter.set_integer_casting()
return parameter


def create_nevergrad_parameter_from_override(override: Override) -> Any:
@@ -86,20 +94,24 @@ class NevergradSweeperImpl(Sweeper):
def __init__(
self,
optim: OptimConf,
parametrization: Optional[DictConfig],
parameterization: Optional[DictConfig],
):
self.opt_config = optim
self.config: Optional[DictConfig] = None
self.launcher: Optional[Launcher] = None
self.hydra_context: Optional[HydraContext] = None
self.job_results = None
self.parametrization: Dict[str, Any] = {}
if parametrization is not None:
assert isinstance(parametrization, DictConfig)
self.parametrization = {
self.parameterization: Dict[str, Any] = {}
if parameterization is not None:
assert isinstance(parameterization, DictConfig)
self.parameterization = {
str(x): create_nevergrad_param_from_config(y)
for x, y in parametrization.items()
for x, y in parameterization.items()
}
self.cheap_constraints: List[CheapConstraintFn] = []
if optim.cheap_constraints is not None:
for constraint in optim.cheap_constraints.values():
self.cheap_constraints.append(constraint)
self.job_idx: Optional[int] = None

def setup(
Expand All @@ -122,8 +134,8 @@ def sweep(self, arguments: List[str]) -> None:
assert self.job_idx is not None
direction = -1 if self.opt_config.maximize else 1
name = "maximization" if self.opt_config.maximize else "minimization"
# Override the parametrization from commandline
params = dict(self.parametrization)
# Override the parameterization from commandline
params = dict(self.parameterization)

parser = OverridesParser.create()
parsed = parser.parse_overrides(arguments)
@@ -133,9 +145,11 @@ def sweep(self, arguments: List[str]) -> None:
create_nevergrad_parameter_from_override(override)
)

parametrization = ng.p.Dict(**params)
parametrization.function.deterministic = not self.opt_config.noisy
parametrization.random_state.seed(self.opt_config.seed)
parameterization = ng.p.Dict(**params)
parameterization.function.deterministic = not self.opt_config.noisy
parameterization.random_state.seed(self.opt_config.seed)
for constraint in self.cheap_constraints:
parameterization.register_cheap_constraint(constraint)
# log and build the optimizer
opt = self.opt_config.optimizer
remaining_budget = self.opt_config.budget
@@ -144,19 +158,33 @@ def sweep(self, arguments: List[str]) -> None:
f"NevergradSweeper(optimizer={opt}, budget={remaining_budget}, "
f"num_workers={nw}) {name}"
)
log.info(f"with parametrization {parametrization}")
log.info(f"with parameterization {parameterization}")
log.info(f"Sweep output dir: {self.config.hydra.sweep.dir}")
optimizer = ng.optimizers.registry[opt](parametrization, remaining_budget, nw)
if self.opt_config.load_if_exists is not None and self.opt_config.load_if_exists.exists():
optimizer = ng.optimizers.registry[opt].load(self.opt_config.load_if_exists)
log.info(f"Resuming nevergrad optimization from budget={optimizer.num_ask} or {remaining_budget}")
remaining_budget -= optimizer.num_ask
self.job_idx = optimizer.num_ask
else:
log.info(f"Initializing optimizer from scratch with budget={remaining_budget}")
optimizer = ng.optimizers.registry[opt](parameterization, remaining_budget, nw)
for callback_spec in self.opt_config.callbacks.values():
optimizer.register_callback(callback_spec.name, callback_spec.callback)
# loop!
all_returns: List[Any] = []
best: Tuple[float, ng.p.Parameter] = (float("inf"), parametrization)
best: Tuple[float, ng.p.Parameter] = (float("inf"), parameterization)
while remaining_budget > 0:
batch = min(nw, remaining_budget)
remaining_budget -= batch
candidates = [optimizer.ask() for _ in range(batch)]
overrides = list(
tuple(f"{x}={y}" for x, y in c.value.items()) for c in candidates
tuple(
f"{x}={y.tolist() if isinstance(y, np.ndarray) else y}"
for x, y in c.value.items()
)
for c in candidates
)

self.validate_batch_is_legal(overrides)
returns = self.launcher.launch(overrides, initial_job_idx=self.job_idx)
# would have been nice to avoid waiting for all jobs to finish
Expand Down Expand Up @@ -189,7 +217,9 @@ def sweep(self, arguments: List[str]) -> None:
recom = optimizer.provide_recommendation()
results_to_serialize = {
"name": "nevergrad",
"best_evaluated_params": best[1].value,
"best_evaluated_params": {
k: v.tolist() if isinstance(v, np.ndarray) else v for k, v in best[1].value.items()
},
"best_evaluated_result": direction * best[0],
}
OmegaConf.save(
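
To make the new wiring concrete, here is a compact, self-contained Nevergrad sketch of the three features the sweeper now supports (callbacks, a cheap constraint, and dump/resume). It mirrors the sweeper's own calls (register_cheap_constraint, register_callback, registry[...].load); the optimizer name, file paths, and the toy loss are illustrative assumptions, not part of the plugin.

from pathlib import Path

import nevergrad as ng

params = ng.p.Dict(
    lr=ng.p.Log(lower=0.001, upper=1.0),
    batch_size=ng.p.Scalar(lower=4, upper=16).set_integer_casting(),
)
# Prune candidates with an out-of-range learning rate before they are evaluated.
params.register_cheap_constraint(lambda value: value["lr"] < 2.0)

dump_path = Path("nevergrad.pkl")
if dump_path.exists():
    # Resume from a previous dump, as the sweeper does with load_if_exists.
    optimizer = ng.optimizers.registry["OnePlusOne"].load(dump_path)
else:
    optimizer = ng.optimizers.registry["OnePlusOne"](params, budget=20, num_workers=1)

# Same callbacks as in the example config.
optimizer.register_callback(
    "tell", ng.callbacks.ParametersLogger("nevergrad.log", append=False)
)
optimizer.register_callback("tell", ng.callbacks.OptimizerDump(dump_path))

for _ in range(optimizer.budget - optimizer.num_ask):
    candidate = optimizer.ask()
    loss = abs(candidate.value["lr"] - 0.12) + abs(candidate.value["batch_size"] - 4)
    optimizer.tell(candidate, loss)

print(optimizer.provide_recommendation().value)
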
plugins/hydra_nevergrad_sweeper/hydra_plugins/hydra_nevergrad_sweeper/config.py
@@ -1,37 +1,64 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from dataclasses import dataclass, field
from typing import Any, Dict, Optional
from typing import Any, Dict, Optional, List, Tuple, TypeAlias, Union, Callable
from pathlib import Path

from hydra.core.config_store import ConfigStore


CheapConstraintFn: TypeAlias = Any
"""A cheap function that can be used to prune bad candidates early.
See https://facebookresearch.github.io/nevergrad/\
optimization.html#optimization-with-constraints for more details.
Actual type: Callable[[Dict[str, Any]], Union[bool, float]]
"""


@dataclass
class ScalarConfigSpec:
class ScalarOrArrayConfigSpec:
"""Representation of all the options to define
a scalar or an array parameter.
"""

# lower bound if any
lower: Optional[float] = None
lower: Optional[float | List[float]] = None

# upper bound if any
upper: Optional[float] = None
upper: Optional[float | List[float]] = None

# initial value
# default to the middle point if completely bounded
init: Optional[float] = None
# ng.p.Array is used if init is a list (or if shape is set)
init: Optional[float | List[float]] = None

# step size for an update
# defaults to 1 if unbounded
# or 1/6 of the range if completely bounded
step: Optional[float] = None
step: Optional[float | List[float]] = None

# cast to integer
integer: bool = False

# logarithmically distributed
# unused for array types
log: bool = False

# shape of the array
# if set, ng.p.Array is used
shape: Optional[Tuple[int, ...]] = None


@dataclass
class CallbackConfigSpec:
"""Representation of all the options to define a callback."""

# name of the event the callback is registered for: either "ask" or "tell"
name: str

# callback function
# Actual type: Callable[[ng.optimizers.base.Optimizer], None] for "ask",
# or Callable[[ng.optimizers.base.Optimizer, ng.p.Parameter, float], None] for "tell"
callback: Any


@dataclass
class OptimConf:
Expand Down Expand Up @@ -65,6 +92,22 @@ class OptimConf:
# maximum authorized failure rate for a batch of parameters
max_failure_rate: float = 0.0

# Define cheap constraints configuration via Python methods.
# If given, `cheap_constraints` should be a dict of callables with the signature
# Callable[[Dict[str, Any]], float | bool]. The input dict is the parameterization
# of the trial.
# https://facebookresearch.github.io/nevergrad/optimization.html#optimization-with-constraints
cheap_constraints: Dict[str, CheapConstraintFn] = field(default_factory=dict)

# These are callbacks that are passed to the optimizer via the `register_callback`
# method. See the Nevergrad documentation for more information.
# https://facebookresearch.github.io/nevergrad/optimizers_ref.html#nevergrad.optimizers.base.Optimizer.register_callback
callbacks: Dict[str, CallbackConfigSpec] = field(default_factory=dict)

# Load an existing optimizer dump and resume the sweep. This is the path to the saved
# optimizer pickle, which can be produced via the OptimizerDump callback.
load_if_exists: Optional[Path] = None


@dataclass
class NevergradSweeperConf:
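
Anything with Nevergrad's callback signature can be supplied through CallbackConfigSpec above. The sketch below is a hypothetical custom "tell" callback; the (optimizer, candidate, loss) call signature follows Nevergrad's register_callback convention, and the my_app.BestLossLogger target path is an illustrative assumption.

import logging

log = logging.getLogger(__name__)


class BestLossLogger:
    """Hypothetical "tell" callback that logs every improvement of the best loss."""

    def __init__(self) -> None:
        self.best = float("inf")

    def __call__(self, optimizer, candidate, loss):
        # optimizer: ng.optimizers.base.Optimizer, candidate: ng.p.Parameter, loss: float
        if loss < self.best:
            self.best = loss
            log.info("new best loss %.4f after %d tells", loss, optimizer.num_tell)


# Referenced from the sweeper config roughly as:
#   callbacks:
#     best_loss_logger:
#       name: tell
#       callback:
#         _target_: my_app.BestLossLogger  # hypothetical module path
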