[Tune] Nevergrad optimizer with extra parameters (#31015)
Some Nevergrad search algorithms have required constructor inputs, such as `budget` for the `NgOpt` search algorithm, but the `NevergradSearch` class provides no way to pass these parameters down to the search algorithm. This adds an `optimizer_kwargs` argument to `NevergradSearch` that is passed through to the optimizer when it is instantiated (see the sketch after the commit metadata below).

Signed-off-by: yhna <[email protected]>
Signed-off-by: YH <[email protected]>
Signed-off-by: Younghwan Na <[email protected]>
Signed-off-by: yhna940 <[email protected]>
Co-authored-by: Justin Yu <[email protected]>
yhna940 and justinvyu authored Jan 23, 2023
1 parent ee23cc8 commit 33d4b14
Showing 3 changed files with 26 additions and 6 deletions.
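
Before the diffs, a minimal sketch of the new usage (modeled on the unit test added below; `NgOpt` and `budget` come from the commit message, and the concrete values are arbitrary):

    import nevergrad as ng
    from ray.tune.search.nevergrad import NevergradSearch

    # `budget` is a constructor argument that some Nevergrad optimizers
    # require; `optimizer_kwargs` forwards it when the optimizer class is
    # instantiated lazily against the search space.
    searcher = NevergradSearch(
        optimizer=ng.optimizers.NgOpt,
        optimizer_kwargs={"budget": 16},
        metric="loss",
        mode="min",
    )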
24 changes: 19 additions & 5 deletions python/ray/tune/search/nevergrad/nevergrad_search.py
@@ -50,8 +50,12 @@ class NevergradSearch(Searcher):
         $ pip install nevergrad
     Parameters:
-        optimizer: Optimizer provided
-            from Nevergrad. Alter
+        optimizer: Optimizer class provided from Nevergrad.
+            See here for available optimizers:
+            https://facebookresearch.github.io/nevergrad/optimizers_ref.html#optimizers
+            This can also be an instance of a `ConfiguredOptimizer`. See the
+            section on configured optimizers in the above link.
+        optimizer_kwargs: Kwargs passed in when instantiating the `optimizer`
         space: Nevergrad parametrization
             to be passed to optimizer on instantiation, or list of parameter
             names if you passed an optimizer object.
@@ -120,11 +124,11 @@ def __init__(
         optimizer: Optional[
             Union[Optimizer, Type[Optimizer], ConfiguredOptimizer]
         ] = None,
+        optimizer_kwargs: Optional[Dict] = None,
         space: Optional[Union[Dict, Parameter]] = None,
         metric: Optional[str] = None,
         mode: Optional[str] = None,
         points_to_evaluate: Optional[List[Dict]] = None,
-        **kwargs,
     ):
         assert (
             ng is not None
Expand All @@ -134,11 +138,12 @@ def __init__(
if mode:
assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."

super(NevergradSearch, self).__init__(metric=metric, mode=mode, **kwargs)
super(NevergradSearch, self).__init__(metric=metric, mode=mode)

self._space = None
self._opt_factory = None
self._nevergrad_opt = None
self._optimizer_kwargs = optimizer_kwargs or {}

if points_to_evaluate is None:
self._points_to_evaluate = None
@@ -166,6 +171,13 @@ def __init__(
                     "pass a list of parameter names or None as the `space` "
                     "parameter."
                 )
+            if self._optimizer_kwargs:
+                raise ValueError(
+                    "If you pass in optimizer kwargs, either pass "
+                    "an `Optimizer` subclass or an instance of "
+                    "`ConfiguredOptimizer`."
+                )
+
             self._parameters = space
             self._nevergrad_opt = optimizer
         elif (
@@ -187,7 +199,9 @@

     def _setup_nevergrad(self):
         if self._opt_factory:
-            self._nevergrad_opt = self._opt_factory(self._space)
+            self._nevergrad_opt = self._opt_factory(
+                self._space, **self._optimizer_kwargs
+            )

         # nevergrad.tell internally minimizes, so "max" => -1
         if self._mode == "max":
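
Two notes on the change above: `optimizer_kwargs` is only forwarded on the `_opt_factory` path, i.e. when `NevergradSearch` instantiates the optimizer itself, and combining `optimizer_kwargs` with an already-instantiated optimizer now raises the `ValueError` added in this hunk, since a live instance can no longer accept constructor arguments. A sketch of the failing case (the parametrization is illustrative, not taken from the diff):

    import nevergrad as ng
    from ray.tune.search.nevergrad import NevergradSearch

    instantiated = ng.optimizers.CM(
        parametrization=ng.p.Dict(x=ng.p.Scalar(lower=0.0, upper=1.0)),
        budget=16,
    )
    # An optimizer instance and `optimizer_kwargs` are mutually
    # exclusive, so this raises the new ValueError.
    NevergradSearch(
        optimizer=instantiated,
        optimizer_kwargs={"budget": 16},
        space=["x"],
        metric="loss",
        mode="min",
    )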
6 changes: 6 additions & 0 deletions python/ray/tune/tests/test_searchers.py
@@ -272,6 +272,12 @@ def testNevergrad(self):
         )
         self.assertCorrectExperimentOutput(out)

+    def testNevergradWithRequiredOptimizerKwargs(self):
+        from ray.tune.search.nevergrad import NevergradSearch
+        import nevergrad as ng
+
+        NevergradSearch(optimizer=ng.optimizers.CM, optimizer_kwargs=dict(budget=16))
+
     def testOptuna(self):
         from ray.tune.search.optuna import OptunaSearch
         from optuna.samplers import RandomSampler
2 changes: 1 addition & 1 deletion python/ray/tune/tests/test_tune_restore_warm_start.py
@@ -264,7 +264,7 @@ def cost(space, reporter):

         search_alg = NevergradSearch(
             optimizer,
-            parameter_names,
+            space=parameter_names,
             metric="loss",
             mode="min",
         )
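
For completeness, a hedged end-to-end sketch of the feature in a Tune run (not part of this diff; assumes the Ray 2.x `Tuner` and `ray.air.session` APIs and a toy objective):

    import nevergrad as ng
    from ray import tune
    from ray.air import session
    from ray.tune.search.nevergrad import NevergradSearch

    def objective(config):
        # Toy quadratic; the searcher minimizes the reported "loss".
        session.report({"loss": (config["x"] - 0.5) ** 2})

    search_alg = NevergradSearch(
        optimizer=ng.optimizers.NgOpt,
        optimizer_kwargs={"budget": 8},
        metric="loss",
        mode="min",
    )
    tuner = tune.Tuner(
        objective,
        tune_config=tune.TuneConfig(search_alg=search_alg, num_samples=8),
        param_space={"x": tune.uniform(0.0, 1.0)},
    )
    tuner.fit()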
