[Tune] Nevergrad optimizer with extra parameters #31015

Merged · 14 commits · Jan 23, 2023
24 changes: 19 additions & 5 deletions python/ray/tune/search/nevergrad/nevergrad_search.py
@@ -50,8 +50,12 @@ class NevergradSearch(Searcher):
         $ pip install nevergrad

     Parameters:
-        optimizer: Optimizer provided
-            from Nevergrad. Alter
+        optimizer: Optimizer class provided from Nevergrad.
+            See here for available optimizers:
+            https://facebookresearch.github.io/nevergrad/optimizers_ref.html#optimizers
+            This can also be an instance of a `ConfiguredOptimizer`. See the
+            section on configured optimizers in the above link.
+        optimizer_kwargs: Kwargs passed in when instantiating the `optimizer`
         space: Nevergrad parametrization
             to be passed to optimizer on instantiation, or list of parameter
             names if you passed an optimizer object.
@@ -120,11 +124,11 @@ def __init__(
         optimizer: Optional[
             Union[Optimizer, Type[Optimizer], ConfiguredOptimizer]
         ] = None,
+        optimizer_kwargs: Optional[Dict] = None,
         space: Optional[Union[Dict, Parameter]] = None,
         metric: Optional[str] = None,
         mode: Optional[str] = None,
         points_to_evaluate: Optional[List[Dict]] = None,
-        **kwargs,
     ):
         assert (
             ng is not None
@@ -134,11 +138,12 @@
         if mode:
             assert mode in ["min", "max"], "`mode` must be 'min' or 'max'."

-        super(NevergradSearch, self).__init__(metric=metric, mode=mode, **kwargs)
+        super(NevergradSearch, self).__init__(metric=metric, mode=mode)

         self._space = None
         self._opt_factory = None
         self._nevergrad_opt = None
+        self._optimizer_kwargs = optimizer_kwargs or {}

         if points_to_evaluate is None:
             self._points_to_evaluate = None
@@ -166,6 +171,13 @@ def __init__(
                     "pass a list of parameter names or None as the `space` "
                     "parameter."
                 )
+            if self._optimizer_kwargs:
+                raise ValueError(
+                    "If you pass in optimizer kwargs, either pass "
+                    "an `Optimizer` subclass or an instance of "
+                    "`ConfiguredOptimizer`."
+                )
+
             self._parameters = space
             self._nevergrad_opt = optimizer
         elif (
@@ -187,7 +199,9 @@

     def _setup_nevergrad(self):
         if self._opt_factory:
-            self._nevergrad_opt = self._opt_factory(self._space)
+            self._nevergrad_opt = self._opt_factory(
+                self._space, **self._optimizer_kwargs
+            )

         # nevergrad.tell internally minimizes, so "max" => -1
         if self._mode == "max":
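To illustrate the new API: passing the optimizer class together with `optimizer_kwargs` defers instantiation to `_setup_nevergrad`, which forwards the kwargs once the parametrization is resolved. A minimal sketch (the metric name is illustrative; `ng.optimizers.CM` and `budget=16` are taken from the test below):

    import nevergrad as ng
    from ray.tune.search.nevergrad import NevergradSearch

    # Pass the optimizer *class* plus its constructor kwargs; NevergradSearch
    # defers instantiation until the search space (the parametrization) is
    # known, then calls optimizer(space, **optimizer_kwargs) internally.
    searcher = NevergradSearch(
        optimizer=ng.optimizers.CM,
        optimizer_kwargs={"budget": 16},
        metric="loss",
        mode="min",
    )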
6 changes: 6 additions & 0 deletions python/ray/tune/tests/test_searchers.py
@@ -272,6 +272,12 @@ def testNevergrad(self):
         )
         self.assertCorrectExperimentOutput(out)

+    def testNevergradWithRequiredOptimizerKwargs(self):
+        from ray.tune.search.nevergrad import NevergradSearch
+        import nevergrad as ng
+
+        NevergradSearch(optimizer=ng.optimizers.CM, optimizer_kwargs=dict(budget=16))
+
     def testOptuna(self):
         from ray.tune.search.optuna import OptunaSearch
         from optuna.samplers import RandomSampler
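Conversely, an optimizer *instance* already carries its own configuration, so combining it with `optimizer_kwargs` triggers the `ValueError` added above. A minimal sketch, assuming the usual Nevergrad constructor signature (the parametrization here is illustrative):

    import nevergrad as ng
    from ray.tune.search.nevergrad import NevergradSearch

    # The instance below is already configured with a budget, so the extra
    # kwargs are rejected when NevergradSearch is constructed.
    opt = ng.optimizers.OnePlusOne(
        parametrization=ng.p.Dict(x=ng.p.Scalar()), budget=16
    )
    try:
        NevergradSearch(optimizer=opt, optimizer_kwargs={"budget": 16}, space=["x"])
    except ValueError as err:
        print(err)  # "If you pass in optimizer kwargs, either pass ..."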
2 changes: 1 addition & 1 deletion python/ray/tune/tests/test_tune_restore_warm_start.py
@@ -264,7 +264,7 @@ def cost(space, reporter):

         search_alg = NevergradSearch(
             optimizer,
-            parameter_names,
+            space=parameter_names,
             metric="loss",
             mode="min",
         )
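Because `optimizer_kwargs` now occupies the second positional slot of `__init__`, call sites that previously passed the parameter-name list positionally must switch to the `space=` keyword, as the warm-start test above now does. A sketch of the updated call pattern (the parametrization and budget are illustrative):

    import nevergrad as ng
    from ray.tune.search.nevergrad import NevergradSearch

    parameter_names = ["height", "width"]
    optimizer = ng.optimizers.OnePlusOne(
        parametrization=ng.p.Dict(height=ng.p.Scalar(), width=ng.p.Scalar()),
        budget=100,
    )
    search_alg = NevergradSearch(
        optimizer,
        space=parameter_names,  # positional would now bind to optimizer_kwargs
        metric="loss",
        mode="min",
    )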