Skip to content

Commit

Permalink
Merge pull request #249 from bessagroup/mpvanderschelling/issue248
Browse files Browse the repository at this point in the history
Fixes #248
  • Loading branch information
mpvanderschelling authored Dec 11, 2023
2 parents 83e3540 + a281051 commit b13cd92
Show file tree
Hide file tree
Showing 2 changed files with 92 additions and 20 deletions.
86 changes: 72 additions & 14 deletions src/f3dasm/_src/experimentdata/experimentdata.py
Original file line number Diff line number Diff line change
Expand Up @@ -1055,7 +1055,8 @@ def optimize(self, optimizer: Optimizer | str,
data_generator: DataGenerator | str,
iterations: int, kwargs: Optional[Dict[str, Any]] = None,
hyperparameters: Optional[Dict[str, Any]] = None,
x0_selection: str = 'best') -> None:
x0_selection: str = 'best',
sampler: Optional[Sampler | str] = 'random') -> None:
"""Optimize the experimentdata object
Parameters
Expand All @@ -1074,6 +1075,8 @@ def optimize(self, optimizer: Optimizer | str,
optimizer, by default None
x0_selection : str, optional
How to select the initial design, by default 'best'
sampler : Sampler | str
Sampler to use, by default 'random'
Raises
------
Expand All @@ -1089,6 +1092,10 @@ def optimize(self, optimizer: Optimizer | str,
* 'best': Select the best designs from the current experimentdata
* 'random': Select random designs from the current experimentdata
* 'last': Select the last designs from the current experimentdata
* 'new': Create new random designs from the current experimentdata
If the x0_selection is 'new', new designs are sampled with the
sampler provided.
The number of designs selected is equal to the \
population size of the optimizer
Expand All @@ -1101,15 +1108,25 @@ def optimize(self, optimizer: Optimizer | str,
optimizer: Optimizer = _optimizer_factory(
optimizer, self.domain, hyperparameters)

if isinstance(sampler, str):
sampler: Sampler = _sampler_factory(sampler, self.domain)

if optimizer.type == 'scipy':
self._iterate_scipy(
optimizer, data_generator, iterations, kwargs, x0_selection)
optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs=kwargs,
x0_selection=x0_selection,
sampler=sampler)
else:
self._iterate(
optimizer, data_generator, iterations, kwargs, x0_selection)
optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs=kwargs,
x0_selection=x0_selection,
sampler=sampler)

def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
iterations: int, kwargs: dict, x0_selection: str):
iterations: int, kwargs: dict, x0_selection: str,
sampler: Sampler):
"""Internal represenation of the iteration process
Parameters
Expand All @@ -1125,6 +1142,8 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
the DataGenerator, by default None
x0_selection : str
How to select the initial design
sampler: Sampler
If x0_selection = 'new', the sampler to use
Raises
------
Expand All @@ -1138,10 +1157,33 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
* 'best': Select the best designs from the current experimentdata
* 'random': Select random designs from the current experimentdata
* 'last': Select the last designs from the current experimentdata
* 'new': Create new random designs from the current experimentdata
The number of designs selected is equal to the \
If the x0_selection is 'new', new designs are sampled with the
sampler provided.
The number of designs selected is equal to the
population size of the optimizer
"""

if x0_selection == 'new':

if iterations < optimizer.hyperparameters.population:
raise ValueError(
f'For creating new samples, the total number of requested '
f'iterations ({iterations}) cannot be '
f'smaller than the population size '
f'({optimizer.hyperparameters.population})')

self.sample(sampler=sampler,
n_samples=optimizer.hyperparameters.population,
seed=optimizer.seed)

self.evaluate(data_generator=data_generator, kwargs=kwargs)

x0_selection = 'last'
iterations -= optimizer.hyperparameters.population

optimizer.set_x0(self, mode=x0_selection)
optimizer._check_number_of_datapoints()

Expand Down Expand Up @@ -1175,7 +1217,7 @@ def _iterate(self, optimizer: Optimizer, data_generator: DataGenerator,
def _iterate_scipy(self, optimizer: Optimizer,
data_generator: DataGenerator,
iterations: int, kwargs: dict,
x0_selection: str):
x0_selection: str, sampler: Sampler):
"""Internal represenation of the iteration process for s
cipy-optimize algorithms
Expand All @@ -1192,6 +1234,8 @@ def _iterate_scipy(self, optimizer: Optimizer,
to the DataGenerator, by default None
x0_selection : str
How to select the initial design
sampler: Sampler
If x0_selection = 'new', the sampler to use
Raises
------
Expand All @@ -1205,13 +1249,33 @@ def _iterate_scipy(self, optimizer: Optimizer,
* 'best': Select the best designs from the current experimentdata
* 'random': Select random designs from the current experimentdata
* 'last': Select the last designs from the current experimentdata
* 'new': Create new random designs from the current experimentdata
The number of designs selected is equal to the \
If the x0_selection is 'new', new designs are sampled with the
sampler provided.
The number of designs selected is equal to the
population size of the optimizer
"""
n_data_before_iterate = len(self)

if x0_selection == 'new':

if iterations < optimizer.hyperparameters.population:
raise ValueError(
f'For creating new samples, the number of requested '
f'iterations ({iterations}) cannot be '
f'smaller than the population size '
f'({optimizer.hyperparameters.population})')

self.sample(sampler=sampler,
n_samples=optimizer.hyperparameters.population,
seed=optimizer.seed)

self.evaluate(data_generator=data_generator, kwargs=kwargs)
x0_selection = 'last'

optimizer.set_x0(self, mode=x0_selection)
n_data_before_iterate = len(self)
optimizer._check_number_of_datapoints()

optimizer.run_algorithm(iterations, data_generator)
Expand All @@ -1225,14 +1289,8 @@ def _iterate_scipy(self, optimizer: Optimizer,
if len(self) == n_data_before_iterate:
repeated_x, repeated_y = self.get_n_best_output(
n_samples=1).to_numpy()
# repeated_last_element = self.get_n_best_output(
# n_samples=1).to_numpy()[0].ravel()

for repetition in range(iterations):
# self._add_experiments(
# ExperimentSample.from_numpy(repeated_last_element,
# domain=self.domain))

self.add(
domain=self.domain, input_data=repeated_x,
output_data=repeated_y)
Expand Down
26 changes: 20 additions & 6 deletions tests/optimization/test_all_optimizers.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,9 @@ def test_all_optimizers_3_functions(seed: int, data_generator: DataGenerator, op
@pytest.mark.parametrize("iterations", [10, 23, 66, 86])
@pytest.mark.parametrize("optimizer", OPTIMIZERS)
@pytest.mark.parametrize("data_generator", ["sphere"])
def test_optimizer_iterations(iterations: int, data_generator: str, optimizer: str):
@pytest.mark.parametrize("x0_selection", ["best", "new"])
def test_optimizer_iterations(iterations: int, data_generator: str,
optimizer: str, x0_selection: str):
numsamples = 40 # initial samples
seed = 42

Expand Down Expand Up @@ -111,12 +113,24 @@ def test_optimizer_iterations(iterations: int, data_generator: str, optimizer: s
data.evaluate(data_generator, mode='sequential', kwargs={'seed': seed, 'noise': None,
'scale_bounds': np.tile([-1.0, 1.0], (dim, 1)), })

data.optimize(optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs={'seed': seed, 'noise': None,
'scale_bounds': np.tile([-1.0, 1.0], (dim, 1)), },
hyperparameters={'seed': seed})
_optimizer = _optimizer_factory(optimizer, domain=domain)

assert len(data) == (iterations + numsamples)
if x0_selection == "new" and iterations < _optimizer.hyperparameters.population:
with pytest.raises(ValueError):
data.optimize(optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs={'seed': seed, 'noise': None,
'scale_bounds': np.tile([-1.0, 1.0], (dim, 1)), },
hyperparameters={'seed': seed},
x0_selection=x0_selection)
else:

data.optimize(optimizer=optimizer, data_generator=data_generator,
iterations=iterations, kwargs={'seed': seed, 'noise': None,
'scale_bounds': np.tile([-1.0, 1.0], (dim, 1)), },
hyperparameters={'seed': seed},
x0_selection=x0_selection)

assert len(data) == (iterations + numsamples)


if __name__ == "__main__": # pragma: no cover
Expand Down

0 comments on commit b13cd92

Please sign in to comment.