Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Commit

Permalink
Update evaluate parameters from GMM (#1195)
Browse files Browse the repository at this point in the history
* update readme in ga_squad

* update readme

* fix typo

* Update README.md

* Update README.md

* Update README.md

* update readme

* update

* fix path

* update reference

* fix bug in config file

* update nni_arch_overview.png

* update

* update

* update

* update home page

* update default value of metis tuner

* fix broken link in CommunitySharings

* update docs about nested search space

* update docs

* rename cascading to nested

* fix broken link

* update

* update issue link

* fix typo

* update evaluate parameters from GMM

* refine code

* fix optimized mode bug

* update import warning

* update warning

* update optimized mode
  • Loading branch information
xuehui1991 authored and leckie-chn committed Jun 25, 2019
1 parent 8329d30 commit 28999d4
Show file tree
Hide file tree
Showing 4 changed files with 47 additions and 30 deletions.
2 changes: 1 addition & 1 deletion src/sdk/pynni/nni/bohb_advisor/bohb_advisor.py
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@ def __init__(self, s, s_max, eta, max_budget, optimize_mode):
self.s_max = s_max
self.eta = eta
self.max_budget = max_budget
self.optimize_mode = optimize_mode
self.optimize_mode = OptimizeMode(optimize_mode)

self.n = math.ceil((s_max + 1) * eta**s / (s + 1) - _epsilon)
self.r = max_budget / eta**s
Expand Down
2 changes: 1 addition & 1 deletion src/sdk/pynni/nni/hyperband_advisor/hyperband_advisor.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,7 +144,7 @@ def __init__(self, s, s_max, eta, R, optimize_mode):
self.configs_perf = [] # [ {id: [seq, acc]}, {}, ... ]
self.num_configs_to_run = [] # [ n, n, n, ... ]
self.num_finished_configs = [] # [ n, n, n, ... ]
self.optimize_mode = optimize_mode
self.optimize_mode = OptimizeMode(optimize_mode)
self.no_more_trial = False

def is_completed(self):
Expand Down
9 changes: 5 additions & 4 deletions src/sdk/pynni/nni/metis_tuner/Regression_GMM/Selection.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,15 +49,16 @@ def selection_r(x_bounds,
num_starting_points=100,
minimize_constraints_fun=None):
'''
Call selection
Select using different types.
'''
minimize_starting_points = [lib_data.rand(x_bounds, x_types)\
for i in range(0, num_starting_points)]
minimize_starting_points = clusteringmodel_gmm_good.sample(n_samples=num_starting_points)

outputs = selection(x_bounds, x_types,
clusteringmodel_gmm_good,
clusteringmodel_gmm_bad,
minimize_starting_points,
minimize_starting_points[0],
minimize_constraints_fun)

return outputs

def selection(x_bounds,
Expand Down
64 changes: 40 additions & 24 deletions src/sdk/pynni/nni/metis_tuner/metis_tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,15 +20,15 @@

import copy
import logging
import numpy as np
import os
import random
import statistics
import sys
import warnings
from enum import Enum, unique
from multiprocessing.dummy import Pool as ThreadPool

import numpy as np

import nni.metis_tuner.lib_constraint_summation as lib_constraint_summation
import nni.metis_tuner.lib_data as lib_data
import nni.metis_tuner.Regression_GMM.CreateModel as gmm_create_model
Expand All @@ -42,8 +42,6 @@

logger = logging.getLogger("Metis_Tuner_AutoML")



NONE_TYPE = ''
CONSTRAINT_LOWERBOUND = None
CONSTRAINT_UPPERBOUND = None
Expand Down Expand Up @@ -93,7 +91,7 @@ def __init__(self, optimize_mode="maximize", no_resampling=True, no_candidates=F
self.space = None
self.no_resampling = no_resampling
self.no_candidates = no_candidates
self.optimize_mode = optimize_mode
self.optimize_mode = OptimizeMode(optimize_mode)
self.key_order = []
self.cold_start_num = cold_start_num
self.selection_num_starting_points = selection_num_starting_points
Expand Down Expand Up @@ -254,6 +252,9 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
threshold_samplessize_resampling=50, no_candidates=False,
minimize_starting_points=None, minimize_constraints_fun=None):

with warnings.catch_warnings():
warnings.simplefilter("ignore")

next_candidate = None
candidates = []
samples_size_all = sum([len(i) for i in samples_y])
Expand All @@ -271,13 +272,12 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
minimize_constraints_fun=minimize_constraints_fun)
if not lm_current:
return None

if no_candidates is False:
candidates.append({'hyperparameter': lm_current['hyperparameter'],
logger.info({'hyperparameter': lm_current['hyperparameter'],
'expected_mu': lm_current['expected_mu'],
'expected_sigma': lm_current['expected_sigma'],
'reason': "exploitation_gp"})

if no_candidates is False:
# ===== STEP 2: Get recommended configurations for exploration =====
results_exploration = gp_selection.selection(
"lc",
Expand All @@ -290,34 +290,48 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,

if results_exploration is not None:
if _num_past_samples(results_exploration['hyperparameter'], samples_x, samples_y) == 0:
candidates.append({'hyperparameter': results_exploration['hyperparameter'],
temp_candidate = {'hyperparameter': results_exploration['hyperparameter'],
'expected_mu': results_exploration['expected_mu'],
'expected_sigma': results_exploration['expected_sigma'],
'reason': "exploration"})
'reason': "exploration"}
candidates.append(temp_candidate)

logger.info("DEBUG: 1 exploration candidate selected\n")
logger.info(temp_candidate)
else:
logger.info("DEBUG: No suitable exploration candidates were")

# ===== STEP 3: Get recommended configurations for exploitation =====
if samples_size_all >= threshold_samplessize_exploitation:
print("Getting candidates for exploitation...\n")
logger.info("Getting candidates for exploitation...\n")
try:
gmm = gmm_create_model.create_model(samples_x, samples_y_aggregation)
results_exploitation = gmm_selection.selection(
x_bounds,
x_types,
gmm['clusteringmodel_good'],
gmm['clusteringmodel_bad'],
minimize_starting_points,
minimize_constraints_fun=minimize_constraints_fun)

if ("discrete_int" in x_types) or ("range_int" in x_types):
results_exploitation = gmm_selection.selection(x_bounds, x_types,
gmm['clusteringmodel_good'],
gmm['clusteringmodel_bad'],
minimize_starting_points,
minimize_constraints_fun=minimize_constraints_fun)
else:
# If all parameters are of "range_continuous", let's use GMM to generate random starting points
results_exploitation = gmm_selection.selection_r(x_bounds, x_types,
gmm['clusteringmodel_good'],
gmm['clusteringmodel_bad'],
num_starting_points=self.selection_num_starting_points,
minimize_constraints_fun=minimize_constraints_fun)

if results_exploitation is not None:
if _num_past_samples(results_exploitation['hyperparameter'], samples_x, samples_y) == 0:
candidates.append({'hyperparameter': results_exploitation['hyperparameter'],\
'expected_mu': results_exploitation['expected_mu'],\
'expected_sigma': results_exploitation['expected_sigma'],\
'reason': "exploitation_gmm"})
temp_expected_mu, temp_expected_sigma = gp_prediction.predict(results_exploitation['hyperparameter'], gp_model['model'])
temp_candidate = {'hyperparameter': results_exploitation['hyperparameter'],
'expected_mu': temp_expected_mu,
'expected_sigma': temp_expected_sigma,
'reason': "exploitation_gmm"}
candidates.append(temp_candidate)

logger.info("DEBUG: 1 exploitation_gmm candidate selected\n")
logger.info(temp_candidate)
else:
logger.info("DEBUG: No suitable exploitation_gmm candidates were found\n")

Expand All @@ -338,11 +352,13 @@ def _selection(self, samples_x, samples_y_aggregation, samples_y,
if results_outliers is not None:
for results_outlier in results_outliers:
if _num_past_samples(samples_x[results_outlier['samples_idx']], samples_x, samples_y) < max_resampling_per_x:
candidates.append({'hyperparameter': samples_x[results_outlier['samples_idx']],\
temp_candidate = {'hyperparameter': samples_x[results_outlier['samples_idx']],\
'expected_mu': results_outlier['expected_mu'],\
'expected_sigma': results_outlier['expected_sigma'],\
'reason': "resampling"})
'reason': "resampling"}
candidates.append(temp_candidate)
logger.info("DEBUG: %d re-sampling candidates selected\n")
logger.info(temp_candidate)
else:
logger.info("DEBUG: No suitable resampling candidates were found\n")

Expand Down

0 comments on commit 28999d4

Please sign in to comment.