Skip to content

Commit

Permalink
update OptimizationLogger and callback tests for MOO (#1209)
Browse files Browse the repository at this point in the history
* updated callback tests for MOO

* removed stray print

* Delete =

* updated optimization logger for MOO and added get_min_losses function

* edited get_min_losses

* type fixes

* removed testing statement

* removed testing statement

* more efficient get min losses

* added optimization logger test for single objective

Co-authored-by: Theodore Clarke <theoajclarke@devfair0149.h2.fair>
  • Loading branch information
theoajc and Theodore Clarke authored Aug 18, 2021
1 parent e373c56 commit dd03c6f
Show file tree
Hide file tree
Showing 3 changed files with 51 additions and 8 deletions.
13 changes: 11 additions & 2 deletions nevergrad/optimization/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,8 +87,17 @@ def __call__(self, optimizer: base.Optimizer, *args: tp.Any, **kwargs: tp.Any) -
if time.time() >= self._next_time or self._next_tell >= optimizer.num_tell:
self._next_time = time.time() + self._log_interval_seconds
self._next_tell = optimizer.num_tell + self._log_interval_tells
x = optimizer.provide_recommendation()
self._logger.log(self._log_level, "After %s, recommendation is %s", optimizer.num_tell, x)
if optimizer.num_objectives == 1:
x = optimizer.provide_recommendation()
self._logger.log(self._log_level, "After %s, recommendation is %s", optimizer.num_tell, x)
else:
losses = optimizer._hypervolume_pareto.get_min_losses() # type: ignore
self._logger.log(
self._log_level,
"After %s, the respective minimum loss for each objective in the pareto front is %s",
optimizer.num_tell,
losses,
)


class ParametersLogger:
Expand Down
3 changes: 3 additions & 0 deletions nevergrad/optimization/multiobjective/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -174,6 +174,9 @@ def pareto_front(
"""
return self._pf.get_front(size, subset, subset_tentatives)

def get_min_losses(self) -> tp.List[float]:
    """Return, for each objective, the minimum loss observed across the pareto front.

    Computes a column-wise minimum over the ``losses`` vectors of all points
    currently stored in the pareto front (``self._pf.get_raw()``).

    NOTE(review): ``np.min(..., axis=0)`` returns an ``np.ndarray`` here, not a
    ``List[float]`` as annotated — confirm whether callers rely on the array
    (e.g. its ``[12. 12.]`` string formatting in logs) before tightening this.
    NOTE(review): presumably the pareto front is non-empty when this is called;
    ``np.min`` on an empty sequence would raise — verify against callers.
    """
    return np.min([p.losses for p in self._pf.get_raw()], axis=0)


class ParetoFront:
def __init__(
Expand Down
43 changes: 37 additions & 6 deletions nevergrad/optimization/test_callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,20 +103,51 @@ def test_progressbar_dump(tmp_path: Path) -> None:
optimizer.tell(cand, 0)


class _EarlyStoppingTestee:
def __init__(self) -> None:
self.num_calls = 0

def __call__(self, *args, **kwds) -> float:
self.num_calls += 1
return np.random.rand()


def test_early_stopping() -> None:
    """Check that an EarlyStopping callback interrupts minimize after its predicate fires.

    The stripped diff left both the pre- and post-commit versions of several
    lines interleaved (duplicate ``instrum`` assignments and ``minimize`` calls);
    this is the coherent post-commit version.
    """
    instrum = ng.p.Instrumentation(None, 2.0, blublu="blublu", array=ng.p.Array(shape=(3, 2)))
    func = _EarlyStoppingTestee()
    optimizer = optimizerlib.OnePlusOne(parametrization=instrum, budget=100)
    early_stopping = ng.callbacks.EarlyStopping(lambda opt: opt.num_ask > 3)
    optimizer.register_callback("ask", early_stopping)
    optimizer.minimize(func, verbosity=2)
    # num_ask is set at the end of ask, so the callback sees the old value.
    assert func.num_calls == 4
    # below functions are included in the docstring of EarlyStopping
    assert optimizer.current_bests["minimum"].mean < 12
    assert optimizer.recommend().loss < 12  # type: ignore


def test_optimization_logger(caplog) -> None:
    """Check the OptimizationLogger callback output for a single-objective run."""
    instrum = ng.p.Instrumentation(
        None, 2.0, blublu="blublu", array=ng.p.Array(shape=(3, 2)), multiobjective=False
    )
    logging.basicConfig(level=os.environ.get("LOGLEVEL", "INFO"))
    logger = logging.getLogger(__name__)
    optimizer = optimizerlib.OnePlusOne(parametrization=instrum, budget=3)
    callback = callbacks.OptimizationLogger(
        logger=logger, log_level=logging.INFO, log_interval_tells=10, log_interval_seconds=0.1
    )
    optimizer.register_callback("tell", callback)
    with caplog.at_level(logging.INFO):
        optimizer.minimize(_func, verbosity=2)
    expected = (
        "After 0, recommendation is Instrumentation(Tuple(None,2.0),"
        "Dict(array=Array{(3,2)},blublu=blublu,multiobjective=False))"
    )
    assert expected in caplog.text


def test_optimization_logger_MOO(caplog) -> None:
instrum = ng.p.Instrumentation(
None, 2.0, blublu="blublu", array=ng.p.Array(shape=(3, 2)), multiobjective=True
)
Expand All @@ -132,6 +163,6 @@ def test_optimization_logger(caplog) -> None:
with caplog.at_level(logging.INFO):
optimizer.minimize(_func, verbosity=2)
assert (
"After 0, recommendation is Instrumentation(Tuple(None,2.0),Dict(array=Array{(3,2)},blublu=blublu,multiobjective=True))"
"After 0, the respective minimum loss for each objective in the pareto front is [12. 12.]"
in caplog.text
)

0 comments on commit dd03c6f

Please sign in to comment.