diff --git a/python/ray/tune/search/hyperopt/hyperopt_search.py b/python/ray/tune/search/hyperopt/hyperopt_search.py
index 5924f19096983..802ead66add1d 100644
--- a/python/ray/tune/search/hyperopt/hyperopt_search.py
+++ b/python/ray/tune/search/hyperopt/hyperopt_search.py
@@ -40,6 +40,13 @@
 logger = logging.getLogger(__name__)
 
 
+HYPEROPT_UNDEFINED_DETAILS = (
+    " This issue can also come up with HyperOpt if your search space only "
+    "contains constant variables, which is not supported by HyperOpt. In that case, "
+    "don't pass any searcher or add sample variables to the search space."
+)
+
+
 class HyperOptSearch(Searcher):
     """A wrapper around HyperOpt to provide trial suggestions.
 
@@ -192,6 +199,14 @@ def __init__(
     def _setup_hyperopt(self) -> None:
         from hyperopt.fmin import generate_trials_to_calculate
 
+        if not self._space:
+            raise RuntimeError(
+                UNDEFINED_SEARCH_SPACE.format(
+                    cls=self.__class__.__name__, space="space"
+                )
+                + HYPEROPT_UNDEFINED_DETAILS
+            )
+
         if self._metric is None and self._mode:
             # If only a mode was passed, use anonymous metric
             self._metric = DEFAULT_METRIC
@@ -283,6 +298,7 @@ def suggest(self, trial_id: str) -> Optional[Dict]:
                 UNDEFINED_SEARCH_SPACE.format(
                     cls=self.__class__.__name__, space="space"
                 )
+                + HYPEROPT_UNDEFINED_DETAILS
             )
         if not self._metric or not self._mode:
             raise RuntimeError(
diff --git a/python/ray/tune/tests/test_sample.py b/python/ray/tune/tests/test_sample.py
index 7fbeffd846b61..f40ec7339975f 100644
--- a/python/ray/tune/tests/test_sample.py
+++ b/python/ray/tune/tests/test_sample.py
@@ -1071,6 +1071,17 @@ def testConvertHyperOptNested(self):
 
             self.assertIn(config["domain_nested"], ["M", "N", "O", "P"])
 
+    def testConvertHyperOptConstant(self):
+        from ray.tune.search.hyperopt import HyperOptSearch
+
+        config = {"a": 4}
+
+        searcher = HyperOptSearch()
+        with self.assertRaisesRegex(
+            RuntimeError, "This issue can also come up with HyperOpt"
+        ):
+            searcher.set_search_properties(metric="a", mode="max", config=config)
+
     def testSampleBoundsHyperopt(self):
         from ray.tune.search.hyperopt import HyperOptSearch
 
diff --git a/python/ray/tune/tuner.py b/python/ray/tune/tuner.py
index f2aac736b2012..2bd3e86361344 100644
--- a/python/ray/tune/tuner.py
+++ b/python/ray/tune/tuner.py
@@ -28,6 +28,14 @@
 _SELF = "self"
 
 
+_TUNER_FAILED_MSG = (
+    "The Ray Tune run failed. Please inspect the previous error messages for a "
+    "cause. After fixing the issue, you can restart the run from scratch or "
+    "continue this run. To continue this run, you can use "
+    '`tuner = Tuner.restore("{path}")`.'
+)
+
+
 @PublicAPI(stability="beta")
 class Tuner:
     """Tuner is the recommended way of launching hyperparameter tuning jobs with Ray Tune.
@@ -235,9 +243,9 @@ def fit(self) -> ResultGrid:
                 return self._local_tuner.fit()
             except Exception as e:
                 raise TuneError(
-                    f"Tune run failed. "
-                    f'Please use tuner = Tuner.restore("'
-                    f'{self._local_tuner.get_experiment_checkpoint_dir()}") to resume.'
+                    _TUNER_FAILED_MSG.format(
+                        path=self._local_tuner.get_experiment_checkpoint_dir()
+                    )
                 ) from e
         else:
             experiment_checkpoint_dir = ray.get(
@@ -247,7 +255,5 @@ def fit(self) -> ResultGrid:
                 return ray.get(self._remote_tuner.fit.remote())
             except Exception as e:
                 raise TuneError(
-                    f"Tune run failed. "
-                    f'Please use tuner = Tuner.restore("'
-                    f'{experiment_checkpoint_dir}") to resume.'
+                    _TUNER_FAILED_MSG.format(path=experiment_checkpoint_dir)
                 ) from e