Implement enas-mode and oneshot-mode for NAS interface (#1201)
* update

* dev-tf-master

* fix bugs

* fix bugs

* remove unnecessary lines

* dev oneshot and darts

* dev nas

* dev enas and oneshot

* dev enas and oneshot

* dev enas and oneshot

* dev oneshot and enas

* dev oneshot

* add ut

* add docstring

* add docstring

* fix

* resolve comments by changing docstring

* resolve comments
Crysple authored and leckie-chn committed Jun 25, 2019
1 parent 761e6d5 commit 2aa825b
Showing 9 changed files with 243 additions and 24 deletions.
1 change: 1 addition & 0 deletions src/nni_manager/rest_server/restValidationSchemas.ts
@@ -51,6 +51,7 @@ export namespace ValidationSchemas {
        command: joi.string().min(1),
        virtualCluster: joi.string(),
        shmMB: joi.number(),
        nasMode: joi.string().valid('classic_mode', 'enas_mode', 'oneshot_mode'),
        worker: joi.object({
            replicas: joi.number().min(1).required(),
            image: joi.string().min(1),
1 change: 1 addition & 0 deletions src/sdk/pynni/nni/__init__.py
@@ -23,6 +23,7 @@

from .trial import *
from .smartparam import *
from .nas_utils import reload_tensorflow_variables

class NoMoreTrialError(Exception):
    def __init__(self, ErrorInfo):
160 changes: 160 additions & 0 deletions src/sdk/pynni/nni/nas_utils.py
@@ -0,0 +1,160 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ==================================================================================================

from . import trial


def classic_mode(
        mutable_id,
        mutable_layer_id,
        funcs,
        funcs_args,
        fixed_inputs,
        optional_inputs,
        optional_input_size):
    '''Execute the chosen function on the chosen inputs directly.
    In this mode, the trial code only runs the chosen subgraph (i.e., the chosen ops
    and inputs), without touching the full model graph.'''
    if trial._params is None:
        trial.get_next_parameter()
    mutable_block = trial.get_current_parameter(mutable_id)
    chosen_layer = mutable_block[mutable_layer_id]["chosen_layer"]
    chosen_inputs = mutable_block[mutable_layer_id]["chosen_inputs"]
    real_chosen_inputs = [optional_inputs[input_name]
                          for input_name in chosen_inputs]
    layer_out = funcs[chosen_layer](
        [fixed_inputs, real_chosen_inputs], **funcs_args[chosen_layer])

    return layer_out
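
For illustration, a minimal sketch of the classic_mode path; the toy op add_op and the hand-written parameter dict are hypothetical, but the dict mirrors the shape a tuner sends through nni.get_next_parameter():

import nni

nni.trial._params = {'parameter_id': 0, 'parameters': {
    'mutable_block_0': {
        'mutable_layer_0': {'chosen_layer': 'add_op', 'chosen_inputs': ['y']}
    }
}}

def add_op(inputs):
    # inputs is [fixed_inputs, chosen_inputs]
    return sum(inputs[0]) + sum(inputs[1])

layer_out = nni.mutable_layer(
    'mutable_block_0', 'mutable_layer_0',
    {'add_op': add_op}, {'add_op': {}},   # funcs, funcs_args
    [1], {'x': 10, 'y': 2}, 1)            # fixed_inputs, optional_inputs, optional_input_size
assert layer_out == 3                     # fixed input 1 + chosen optional input 'y' (2)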


def enas_mode(
        mutable_id,
        mutable_layer_id,
        funcs,
        funcs_args,
        fixed_inputs,
        optional_inputs,
        optional_input_size,
        tf):
    '''In enas mode, the trial builds the full model graph but only runs a subgraph of it.
    This is implemented by masking inputs and branching over ops.
    Specifically, the subgraph received through nni.get_next_parameter determines
    which inputs are masked out and which op is executed.'''
    name_prefix = "{}_{}".format(mutable_id, mutable_layer_id)
    # store namespace
    if 'name_space' not in globals():
        global name_space
        name_space = dict()
    name_space[mutable_id] = True
    name_space[name_prefix] = dict()
    name_space[name_prefix]['funcs'] = list(funcs)
    name_space[name_prefix]['optional_inputs'] = list(optional_inputs)
    # create tensorflow variables as 1/0 signals used to form the subgraph
    if 'tf_variables' not in globals():
        global tf_variables
        tf_variables = dict()
    name_for_optional_inputs = name_prefix + '_optional_inputs'
    name_for_funcs = name_prefix + '_funcs'
    tf_variables[name_prefix] = dict()
    tf_variables[name_prefix]['optional_inputs'] = tf.get_variable(
        name_for_optional_inputs,
        [len(optional_inputs)],
        dtype=tf.bool,
        trainable=False)
    tf_variables[name_prefix]['funcs'] = tf.get_variable(
        name_for_funcs, [], dtype=tf.int64, trainable=False)

    # get real values using their variable names
    real_optional_inputs_value = [optional_inputs[name]
                                  for name in name_space[name_prefix]['optional_inputs']]
    real_func_value = [funcs[name]
                       for name in name_space[name_prefix]['funcs']]
    real_funcs_args = [funcs_args[name]
                       for name in name_space[name_prefix]['funcs']]
    # build the tensorflow graph for getting chosen inputs by masking
    real_chosen_inputs = tf.boolean_mask(
        real_optional_inputs_value, tf_variables[name_prefix]['optional_inputs'])
    # build the tensorflow graph of the different branches with tf.case
    branches = dict()
    for func_id in range(len(funcs)):
        func_output = real_func_value[func_id](
            [fixed_inputs, real_chosen_inputs], **real_funcs_args[func_id])
        # bind func_output as a default argument so each lambda returns its own
        # branch output instead of all branches closing over the last loop value
        pred = tf.equal(tf_variables[name_prefix]['funcs'], func_id)
        branches[pred] = lambda out=func_output: out
    layer_out = tf.case(branches, exclusive=True,
                        default=lambda out=func_output: out)

    return layer_out
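
A toy illustration of the masking mechanism above (not the NNI API itself), assuming TensorFlow 1.x: a non-trainable bool variable gates which candidate inputs flow onward, and loading new values into it switches the executed subgraph without rebuilding the graph.

import tensorflow as tf  # assumes TF 1.x APIs (tf.Session, Variable.load)

candidates = tf.constant([[1.0], [2.0], [3.0]])     # three optional inputs
mask = tf.Variable([False, False, False], trainable=False)
chosen = tf.boolean_mask(candidates, mask)          # inputs kept for this step

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    mask.load([True, False, True], sess)            # "tuner" keeps inputs 0 and 2
    print(sess.run(chosen))                         # [[1.] [3.]]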


def oneshot_mode(
        mutable_id,
        mutable_layer_id,
        funcs,
        funcs_args,
        fixed_inputs,
        optional_inputs,
        optional_input_size,
        tf):
    '''Similar to enas mode, oneshot mode also builds the full model graph.
    The difference is that oneshot mode does not receive a subgraph from the tuner.
    Instead, it uses dropout to randomly drop inputs and ops.'''
    # NNI requires get_next_parameter to be called before reporting a result,
    # but the parameter is not used in this mode
    if trial._params is None:
        trial.get_next_parameter()
    optional_inputs = list(optional_inputs.values())
    inputs_num = len(optional_inputs)
    # Calculate the dropout rate according to the formula r^(1/k),
    # where r is a hyper-parameter and k is the number of optional inputs
    if inputs_num > 0:
        rate = 0.01 ** (1 / inputs_num)
        noise_shape = [inputs_num] + [1] * len(optional_inputs[0].get_shape())
        optional_inputs = tf.nn.dropout(
            optional_inputs, rate=rate, noise_shape=noise_shape)
        optional_inputs = [optional_inputs[idx] for idx in range(inputs_num)]
    layer_outs = [func([fixed_inputs, optional_inputs], **funcs_args[func_name])
                  for func_name, func in funcs.items()]
    layer_out = tf.add_n(layer_outs)

    return layer_out
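
The choice rate = r**(1/k) pins the probability that all k optional inputs are dropped at once to rate**k = r, so a layer is left with no optional input in only about 1% of steps regardless of how many candidates it has (presumably the rationale for the formula). A quick check:

r = 0.01
for k in (1, 2, 4, 8):
    rate = r ** (1 / k)                            # per-input drop probability
    print(k, round(rate, 3), round(rate ** k, 4))  # rate**k == r for every k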


def reload_tensorflow_variables(session, tf=None):
    '''In enas mode, this function reloads every signal variable created in `enas_mode`,
    so that the full tensorflow graph is switched to the subgraph received from the tuner.
    ---------------
    session: the tensorflow session created by users
    tf: tensorflow module
    '''
    subgraph_from_tuner = trial.get_next_parameter()
    for mutable_id, mutable_block in subgraph_from_tuner.items():
        if mutable_id not in name_space:
            continue
        for mutable_layer_id, mutable_layer in mutable_block.items():
            name_prefix = "{}_{}".format(mutable_id, mutable_layer_id)
            # extract layer information from the subgraph sampled by the tuner
            chosen_layer = name_space[name_prefix]['funcs'].index(
                mutable_layer["chosen_layer"])
            chosen_inputs = [1 if inp in mutable_layer["chosen_inputs"] else 0
                             for inp in name_space[name_prefix]['optional_inputs']]
            # load this information into the pre-defined tensorflow variables
            tf_variables[name_prefix]['funcs'].load(chosen_layer, session)
            tf_variables[name_prefix]['optional_inputs'].load(
                chosen_inputs, session)
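
A hedged sketch of the enas_mode trial loop this enables: before each training step the trial loads the tuner's latest subgraph into the gating variables, then runs the full graph, which now computes only the chosen branch. build_model and batches are hypothetical stand-ins for user code, and the flow only works inside a running NNI experiment:

import tensorflow as tf
import nni

train_op, loss = build_model()  # hypothetical: a graph containing every candidate op
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for batch in batches:       # hypothetical data iterator
        # pull a fresh subgraph from the tuner and switch the gates to it
        nni.reload_tensorflow_variables(sess, tf)
        _, cur_loss = sess.run([train_op, loss], feed_dict=batch)
    nni.report_final_result(cur_loss)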
43 changes: 35 additions & 8 deletions src/sdk/pynni/nni/smartparam.py
@@ -23,6 +23,7 @@

from .env_vars import trial_env_vars
from . import trial
from .nas_utils import classic_mode, enas_mode, oneshot_mode


__all__ = [
@@ -124,7 +125,9 @@ def mutable_layer(
        funcs_args,
        fixed_inputs,
        optional_inputs,
        optional_input_size,
        mode='classic_mode',
        tf=None):
    '''Execute the chosen function on the chosen inputs.
    Below is an example of the chosen function and inputs:
    {
@@ -144,14 +147,38 @@ def mutable_layer(
    fixed_inputs:
    optional_inputs: dict of optional inputs
    optional_input_size: number of candidate inputs to be chosen
    tf: tensorflow module
    '''
    mutable_block = _get_param(mutable_id)
    chosen_layer = mutable_block[mutable_layer_id]["chosen_layer"]
    chosen_inputs = mutable_block[mutable_layer_id]["chosen_inputs"]
    real_chosen_inputs = [optional_inputs[input_name] for input_name in chosen_inputs]
    layer_out = funcs[chosen_layer]([fixed_inputs, real_chosen_inputs], **funcs_args[chosen_layer])

    return layer_out
    if mode == 'classic_mode':
        return classic_mode(mutable_id,
                            mutable_layer_id,
                            funcs,
                            funcs_args,
                            fixed_inputs,
                            optional_inputs,
                            optional_input_size)
    elif mode == 'enas_mode':
        assert tf is not None, 'Internal Error: Tensorflow should not be None in enas_mode'
        return enas_mode(mutable_id,
                         mutable_layer_id,
                         funcs,
                         funcs_args,
                         fixed_inputs,
                         optional_inputs,
                         optional_input_size,
                         tf)
    elif mode == 'oneshot_mode':
        assert tf is not None, 'Internal Error: Tensorflow should not be None in oneshot_mode'
        return oneshot_mode(mutable_id,
                            mutable_layer_id,
                            funcs,
                            funcs_args,
                            fixed_inputs,
                            optional_inputs,
                            optional_input_size,
                            tf)
    else:
        raise RuntimeError('Unrecognized mode: %s' % mode)

def _get_param(key):
    if trial._params is None:
17 changes: 16 additions & 1 deletion src/sdk/pynni/tests/test_smartparam.py
@@ -38,7 +38,13 @@ def setUp(self):
            'test_smartparam/choice3/choice': '[1, 2]',
            'test_smartparam/choice4/choice': '{"a", 2}',
            'test_smartparam/func/function_choice': 'bar',
            'test_smartparam/lambda_func/function_choice': "lambda: 2*3",
            'mutable_block_66': {
                'mutable_layer_0': {
                    'chosen_layer': 'conv2D(size=5)',
                    'chosen_inputs': ['y']
                }
            }
        }
        nni.trial._params = { 'parameter_id': 'test_trial', 'parameters': params }

@@ -61,13 +67,22 @@ def test_lambda_func(self):
        val = nni.function_choice({"lambda: 2*3": lambda: 2*3, "lambda: 3*4": lambda: 3*4}, name='lambda_func', key='test_smartparam/lambda_func/function_choice')
        self.assertEqual(val, 6)

    def test_mutable_layer(self):
        layer_out = nni.mutable_layer('mutable_block_66', 'mutable_layer_0',
                                      {'conv2D(size=3)': conv2D, 'conv2D(size=5)': conv2D},
                                      {'conv2D(size=3)': {'size': 3}, 'conv2D(size=5)': {'size': 5}},
                                      [100], {'x': 1, 'y': 2}, 1, 'classic_mode')
        # conv2D receives [fixed_inputs, chosen_inputs] == [[100], [2]] with size=5,
        # so it returns [100] + [2] + [5]
        self.assertEqual(layer_out, [100, 2, 5])



def foo():
    return 'foo'

def bar():
    return 'bar'

def conv2D(inputs, size=3):
    return inputs[0] + inputs[1] + [size]

if __name__ == '__main__':
    main()
9 changes: 5 additions & 4 deletions tools/nni_annotation/__init__.py
@@ -76,11 +76,12 @@ def _generate_file_search_space(path, module):
    return search_space


def expand_annotations(src_dir, dst_dir, exp_id='', trial_id=''):
def expand_annotations(src_dir, dst_dir, exp_id='', trial_id='', nas_mode=None):
    """Expand annotations in user code.
    Return dst_dir if annotation detected; return src_dir if not.
    src_dir: directory path of user code (str)
    dst_dir: directory to place generated files (str)
    nas_mode: the NAS mode to generate code for, given that the NAS interface is used (str or None)
    """
    if src_dir[-1] == slash:
        src_dir = src_dir[:-1]
@@ -108,7 +109,7 @@ def expand_annotations(src_dir, dst_dir, exp_id='', trial_id=''):
            dst_path = os.path.join(dst_subdir, file_name)
            if file_name.endswith('.py'):
                if trial_id == '':
                    annotated |= _expand_file_annotations(src_path, dst_path)
                    annotated |= _expand_file_annotations(src_path, dst_path, nas_mode)
                else:
                    module = package + file_name[:-3]
                    annotated |= _generate_specific_file(src_path, dst_path, exp_id, trial_id, module)
@@ -120,10 +121,10 @@ def expand_annotations(src_dir, dst_dir, exp_id='', trial_id=''):

    return dst_dir if annotated else src_dir

def _expand_file_annotations(src_path, dst_path):
def _expand_file_annotations(src_path, dst_path, nas_mode):
    with open(src_path) as src, open(dst_path, 'w') as dst:
        try:
            annotated_code = code_generator.parse(src.read())
            annotated_code = code_generator.parse(src.read(), nas_mode)
            if annotated_code is None:
                shutil.copyfile(src_path, dst_path)
                return False
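
A minimal usage sketch, assuming the module is importable as nni_annotation and the (hypothetical) directories exist; nas_mode takes the same values the REST schema above accepts:

from nni_annotation import expand_annotations

out_dir = expand_annotations('trial_code', 'generated_code', nas_mode='enas_mode')
# Returns 'generated_code' if annotations were found and expanded,
# otherwise the original 'trial_code'.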