From 08b5f33f821a270361aeb8666be4dc7b95572dd8 Mon Sep 17 00:00:00 2001
From: Dimitris Tsapetis
Date: Mon, 21 Nov 2022 12:25:17 -0500
Subject: [PATCH] Fixes AKMCS tests

---
 src/UQpy/utilities/MinimizeOptimizer.py       |   4 +-
 .../sampling/test_adaptive_kriging.py         | 152 ++++++++----------
 2 files changed, 72 insertions(+), 84 deletions(-)

diff --git a/src/UQpy/utilities/MinimizeOptimizer.py b/src/UQpy/utilities/MinimizeOptimizer.py
index aa76a99e..d15904b0 100644
--- a/src/UQpy/utilities/MinimizeOptimizer.py
+++ b/src/UQpy/utilities/MinimizeOptimizer.py
@@ -24,11 +24,11 @@ def optimize(self, function, initial_guess, args=(), jac=False):
             return minimize(function, initial_guess, args=args, method=self.method,
                             bounds=self._bounds, constraints=self.constraints, jac=jac,
-                            options={'disp': True, 'maxiter': 10000, 'catol': 0.002})
+                            options={'disp': False, 'maxiter': 10000, 'catol': 0.002})
         else:
             return minimize(function, initial_guess, args=args, method=self.method,
                             bounds=self._bounds, jac=jac,
-                            options={'disp': True, 'maxiter': 10000, 'catol': 0.002})
+                            options={'disp': False, 'maxiter': 10000, 'catol': 0.002})

     def apply_constraints(self, constraints):
         if self.method.lower() in ['cobyla', 'slsqp', 'trust-constr']:
diff --git a/tests/unit_tests/sampling/test_adaptive_kriging.py b/tests/unit_tests/sampling/test_adaptive_kriging.py
index ab0c2871..73070dba 100644
--- a/tests/unit_tests/sampling/test_adaptive_kriging.py
+++ b/tests/unit_tests/sampling/test_adaptive_kriging.py
@@ -1,5 +1,6 @@
 import pytest

+from UQpy import GaussianProcessRegression, RBF, LinearRegression
 from UQpy.run_model.model_execution.PythonModel import PythonModel
 from UQpy.utilities.MinimizeOptimizer import MinimizeOptimizer

@@ -7,172 +8,159 @@
 from UQpy.run_model.RunModel import RunModel
 from UQpy.distributions.collection import Normal
 from UQpy.sampling.adaptive_kriging_functions import *
-import shutil


 def test_akmcs_weighted_u():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation
-
     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=0)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizer=MinimizeOptimizer('l-bfgs-b'),
-                optimizations_number=10, correlation_model_parameters=[1, 1], random_state=1)
+
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=10, noise=False, regression_model=LinearRegression(),
+                                    random_state=1)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = WeightedUFunction(weighted_u_stop=2)
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2)
     a.run(nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == 1.083176685073489
-    assert a.samples[20, 1] == 0.20293978126855253
-
+    assert a.samples[23, 0] == -0.48297825309989356
+    assert a.samples[20, 1] == 0.39006110248010434


 def test_akmcs_u():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizer=MinimizeOptimizer('l-bfgs-b'),
-                optimizations_number=10, correlation_model_parameters=[1, 1], random_state=0)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=10, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = UFunction(u_stop=2)
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2)
     a.run(nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == -4.141979058326188
-    assert a.samples[20, 1] == -1.6476534435429009
-
+    assert a.samples[23, 0] == -3.781937137406927
+    assert a.samples[20, 1] == 0.17610325620498946


 def test_akmcs_expected_feasibility():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizations_number=10, correlation_model_parameters=[1, 1],
-                optimizer=MinimizeOptimizer('l-bfgs-b'),)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=20, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = ExpectedFeasibility(eff_a=0, eff_epsilon=2, eff_stop=0.001)
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2)
     a.run(nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == 1.366058523912817
-    assert a.samples[20, 1] == -12.914668932772358
-
+    assert a.samples[23, 0] == 5.423754197908594
+    assert a.samples[20, 1] == 2.0355505295053384


 def test_akmcs_expected_improvement():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizations_number=10, correlation_model_parameters=[1, 1],
-                optimizer=MinimizeOptimizer('l-bfgs-b'),)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=50, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = ExpectedImprovement()
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2)
     a.run(nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == 4.553078100499578
-    assert a.samples[20, 1] == -3.508949564718469
-
+    assert a.samples[21, 0] == 6.878734574049913
+    assert a.samples[20, 1] == -6.3410533857909215


 def test_akmcs_expected_improvement_global_fit():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizations_number=10, correlation_model_parameters=[1, 1],
-                optimizer=MinimizeOptimizer('l-bfgs-b'),)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=50, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = ExpectedImprovementGlobalFit()
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2)
     a.run(nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == 11.939859785098493
-    assert a.samples[20, 1] == -8.429899469300118
+    assert a.samples[23, 0] == -10.24267076486663
+    assert a.samples[20, 1] == -11.419510366469687


 def test_akmcs_samples_error():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation

     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=0)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizer=MinimizeOptimizer('l-bfgs-b'),
-                optimizations_number=10, correlation_model_parameters=[1, 1], random_state=1)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=50, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = WeightedUFunction(weighted_u_stop=2)
     with pytest.raises(NotImplementedError):
-        a = AdaptiveKriging(distributions=[Normal(loc=0., scale=4.)]*3, runmodel_object=rmodel, surrogate=K,
-                            learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+        a = AdaptiveKriging(distributions=[Normal(loc=0., scale=4.)] * 3, runmodel_object=rmodel, surrogate=gpr,
+                            learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                             random_state=2, samples=x.samples)


 def test_akmcs_u_run_from_init():
-    from UQpy.surrogates.kriging.regression_models.LinearRegression import LinearRegression
-    from UQpy.surrogates.kriging.correlation_models.ExponentialCorrelation import ExponentialCorrelation
-
     marginals = [Normal(loc=0., scale=4.), Normal(loc=0., scale=4.)]
     x = MonteCarloSampling(distributions=marginals, nsamples=20, random_state=1)
     model = PythonModel(model_script='series.py', model_object_name="series")
     rmodel = RunModel(model=model)
-    regression_model = LinearRegression()
-    correlation_model = ExponentialCorrelation()
-    K = Kriging(regression_model=regression_model, correlation_model=correlation_model,
-                optimizer=MinimizeOptimizer('l-bfgs-b'),
-                optimizations_number=10, correlation_model_parameters=[1, 1], random_state=0)
+    kernel1 = RBF()
+    bounds_1 = [[10 ** (-4), 10 ** 3], [10 ** (-3), 10 ** 2], [10 ** (-3), 10 ** 2]]
+    optimizer1 = MinimizeOptimizer(method='L-BFGS-B', bounds=bounds_1)
+    gpr = GaussianProcessRegression(kernel=kernel1, hyperparameters=[1, 10 ** (-3), 10 ** (-2)], optimizer=optimizer1,
+                                    optimizations_number=100, noise=False, regression_model=LinearRegression(),
+                                    random_state=0)
     # OPTIONS: 'U', 'EFF', 'Weighted-U'
     learning_function = UFunction(u_stop=2)
-    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=K,
-                        learning_nsamples=10**3, n_add=1, learning_function=learning_function,
+    a = AdaptiveKriging(distributions=marginals, runmodel_object=rmodel, surrogate=gpr,
+                        learning_nsamples=10 ** 3, n_add=1, learning_function=learning_function,
                         random_state=2, nsamples=25, samples=x.samples)
-    assert a.samples[23, 0] == -4.141979058326188
-    assert a.samples[20, 1] == -1.6476534435429009
+    assert a.samples[23, 0] == -3.781937137406927
+    assert a.samples[20, 1] == 0.17610325620498946
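
Note (not part of the patch): the MinimizeOptimizer change only silences SciPy's per-iteration output during hyperparameter optimization; the call it makes is otherwise unchanged. A minimal standalone sketch of that SciPy usage, with a hypothetical objective for illustration:

    from scipy.optimize import minimize

    # 'disp': False suppresses SciPy's convergence printout; 'maxiter' caps iterations.
    result = minimize(lambda x: (x[0] - 1.0) ** 2, x0=[0.0], method='L-BFGS-B',
                      options={'disp': False, 'maxiter': 10000})
    print(result.x)  # expected to be close to [1.0]

The updated expected values in the asserts can be checked locally by running the touched test file, e.g. `python -m pytest tests/unit_tests/sampling/test_adaptive_kriging.py -q` from the repository root.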