Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Tune] Nevergrad optimizer with extra parameters #31015

Merged
merged 14 commits into from
Jan 23, 2023
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion python/ray/tune/search/nevergrad/nevergrad_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,8 @@ class NevergradSearch(Searcher):
Parameters:
optimizer: Optimizer provided
from Nevergrad. Alter
yhna940 marked this conversation as resolved.
Show resolved Hide resolved
optimizer_kwargs: Dictionary used
to instantiate the Nevergrad optimizer.
space: Nevergrad parametrization
to be passed to optimizer on instantiation, or list of parameter
names if you passed an optimizer object.
Expand Down Expand Up @@ -120,6 +122,7 @@ def __init__(
optimizer: Optional[
Union[Optimizer, Type[Optimizer], ConfiguredOptimizer]
] = None,
optimizer_kwargs: Dict = dict(),
yhna940 marked this conversation as resolved.
Show resolved Hide resolved
space: Optional[Union[Dict, Parameter]] = None,
metric: Optional[str] = None,
mode: Optional[str] = None,
Expand All @@ -139,6 +142,7 @@ def __init__(
self._space = None
self._opt_factory = None
self._nevergrad_opt = None
self._optimizer_kwargs = optimizer_kwargs
yhna940 marked this conversation as resolved.
Show resolved Hide resolved

if points_to_evaluate is None:
self._points_to_evaluate = None
Expand Down Expand Up @@ -166,6 +170,13 @@ def __init__(
"pass a list of parameter names or None as the `space` "
"parameter."
)
if self._optimizer_kwargs:
raise ValueError(
justinvyu marked this conversation as resolved.
Show resolved Hide resolved
"If you pass a optimizer kwargs to Nevergrad, either "
"pass a class of the `Optimizer` or a instance of "
"the `ConfiguredOptimizer`."
)

self._parameters = space
self._nevergrad_opt = optimizer
elif (
Expand All @@ -187,7 +198,9 @@ def __init__(

def _setup_nevergrad(self):
if self._opt_factory:
self._nevergrad_opt = self._opt_factory(self._space)
self._nevergrad_opt = self._opt_factory(
self._space, **self._optimizer_kwargs
)

# nevergrad.tell internally minimizes, so "max" => -1
if self._mode == "max":
Expand Down
20 changes: 20 additions & 0 deletions python/ray/tune/tests/test_searchers.py
Original file line number Diff line number Diff line change
Expand Up @@ -230,6 +230,26 @@ def testNevergrad(self):
best_trial = out.best_trial
self.assertLessEqual(best_trial.config["report"], 2.0)

def testNevergradWithKwargs(self):
from ray.tune.search.nevergrad import NevergradSearch
import nevergrad as ng

np.random.seed(2020) # At least one nan, inf, -inf and float

out = tune.run(
_invalid_objective,
search_alg=NevergradSearch(
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we limit the test to just the searcher initialization? Just making sure that NevergradSearch can be initialized with an optimizer that requires args, without erroring, should be good enough.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I totally agree with you. Therefore, this test case only verifies that the Nevergrad optimizer is instantiated. Thanks.

optimizer=ng.optimizers.CM, optimizer_kwargs=dict(budget=16)
),
config=self.config,
mode="max",
num_samples=16,
reuse_actors=False,
)

best_trial = out.best_trial
self.assertLessEqual(best_trial.config["report"], 2.0)

def testOptuna(self):
from ray.tune.search.optuna import OptunaSearch
from optuna.samplers import RandomSampler
Expand Down