From 9ad789e7a88f419012f177e49343a2b7a063a3d1 Mon Sep 17 00:00:00 2001
From: quzha
Date: Thu, 24 Jun 2021 11:13:26 +0800
Subject: [PATCH 1/8] add the first hypermodule, autoactivation

---
 nni/retiarii/nn/pytorch/__init__.py    |   1 +
 nni/retiarii/nn/pytorch/hypermodule.py | 120 +++++++++++++++++++++++++
 2 files changed, 121 insertions(+)
 create mode 100644 nni/retiarii/nn/pytorch/hypermodule.py

diff --git a/nni/retiarii/nn/pytorch/__init__.py b/nni/retiarii/nn/pytorch/__init__.py
index 5c392164b1..bcc8c45f3f 100644
--- a/nni/retiarii/nn/pytorch/__init__.py
+++ b/nni/retiarii/nn/pytorch/__init__.py
@@ -1,3 +1,4 @@
 from .api import *
 from .component import *
 from .nn import *
+from .hypermodule import *
\ No newline at end of file
diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
new file mode 100644
index 0000000000..c27354a72a
--- /dev/null
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -0,0 +1,120 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT license.
+
+import math
+
+import torch
+import torch.nn as nn
+
+from .api import LayerChoice
+
+__all__ = ['AutoActivation']
+
+class UnaryMul(nn.Module):
+    def __init__(self):
+        super().__init__()
+        # element-wise for now, will change to per-channel trainable parameter
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+    def forward(self, x):
+        return x * self.beta
+
+class UnaryAdd(nn.Module):
+    def __init__(self):
+        super().__init__()
+        # element-wise for now, will change to per-channel trainable parameter
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+    def forward(self, x):
+        return x + self.beta
+
+class BinaryExpSquare(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+    def forward(self, x, y):
+        return torch.exp(-self.beta * torch.square(x - y))
+
+class BinaryExpAbs(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+    def forward(self, x, y):
+        return torch.exp(-self.beta * torch.abs(x - y))
+
+class BinaryAdd(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+    def forward(self, x, y):
+        return self.beta * x + (1 - self.beta) * y
+
+autoact_unary_funcs = [
+    lambda x: x,
+    lambda x: -x,
+    lambda x: torch.abs(x),
+    lambda x: torch.square(x),
+    lambda x: torch.pow(x, 3),
+    lambda x: torch.pow(x, 0.5),
+    #UnaryMul(),
+    #UnaryAdd(),
+    lambda x: torch.log(torch.abs(x) + 1e-7),
+    lambda x: torch.exp(x),
+    lambda x: torch.sin(x),
+    lambda x: torch.cos(x),
+    lambda x: torch.sinh(x),
+    lambda x: torch.cosh(x),
+    lambda x: torch.tanh(x),
+    lambda x: torch.asinh(x),
+    lambda x: torch.atan(x),
+    lambda x: torch.sinc(x),
+    lambda x: torch.max(x, torch.zeros_like(x)),
+    lambda x: torch.min(x, torch.zeros_like(x)),
+    lambda x: torch.sigmoid(x),
+    lambda x: torch.log(1 + torch.exp(x)),
+    lambda x: torch.exp(-torch.square(x)),
+    lambda x: torch.erf(x)
+]
+
+autoact_binary_funcs = [
+    lambda x, y: x + y,
+    lambda x, y: x * y,
+    lambda x, y: x - y,
+    lambda x, y: x / (y + 1e-7),
+    lambda x, y: torch.max(x, y),
+    lambda x, y: torch.min(x, y),
+    lambda x, y: torch.sigmoid(x) * y,
+    #BinaryExpSquare(),
+    #BinaryExpAbs(),
+    #BinaryAdd()
+]
+
+class UnaryFunctionalModule(nn.Module):
+    def __init__(self, fn):
+        super().__init__()
+        self.fn = fn
+    def forward(self, x):
+        return self.fn(x)
+
+class BinaryFunctionalModule(nn.Module):
+    def __init__(self, fn):
+        super().__init__()
+        self.fn = fn
+    def forward(self, x, y):
+        return self.fn(x, y)
+
+class AutoActivation(nn.Module):
+    """
+    """
+    def __init__(self, unit_num = 1):
+        super().__init__()
+        unary1_cand = [UnaryFunctionalModule(fn) for fn in autoact_unary_funcs]
+        unary1_cand.extend([UnaryMul(), UnaryAdd()])
+        unary2_cand = [UnaryFunctionalModule(fn) for fn in autoact_unary_funcs]
+        unary2_cand.extend([UnaryMul(), UnaryAdd()])
+        binary_cand = [BinaryFunctionalModule(fn) for fn in autoact_binary_funcs]
+        binary_cand.extend([BinaryExpSquare(), BinaryExpAbs(), BinaryAdd()])
+        self.unary1 = LayerChoice(unary1_cand, label='one_unary')
+        self.unary2 = LayerChoice(unary2_cand, label='one_unary')
+        self.binary = LayerChoice(binary_cand)
+
+    def forward(self, x):
+        return self.binary(self.unary1(x), self.unary2(x))

From 32c4f515a5f77998e8748f70099c29ef28f4fe29 Mon Sep 17 00:00:00 2001
From: quzha
Date: Mon, 28 Jun 2021 15:53:06 +0800
Subject: [PATCH 2/8] support base execution engine

---
 nni/retiarii/nn/pytorch/hypermodule.py     | 256 +++++++++++++++------
 nni/retiarii/operation_def/torch_op_def.py |   4 +-
 2 files changed, 187 insertions(+), 73 deletions(-)

diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
index c27354a72a..99814bc29d 100644
--- a/nni/retiarii/nn/pytorch/hypermodule.py
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -6,10 +6,48 @@
 import torch
 import torch.nn as nn
 
+from nni.retiarii.serializer import basic_unit
+
 from .api import LayerChoice
+from ...utils import version_larger_equal
 
 __all__ = ['AutoActivation']
 
+TorchVersion = '1.5.0'
+
+# ============== unary function modules ==============
+
+@basic_unit
+class UnaryIdentity(nn.Module):
+    def forward(self, x):
+        return x
+
+@basic_unit
+class UnaryNegative(nn.Module):
+    def forward(self, x):
+        return -x
+
+@basic_unit
+class UnaryAbs(nn.Module):
+    def forward(self, x):
+        return torch.abs(x)
+
+@basic_unit
+class UnarySquare(nn.Module):
+    def forward(self, x):
+        return torch.square(x)
+
+@basic_unit
+class UnaryPow(nn.Module):
+    def forward(self, x):
+        return torch.pow(x, 3)
+
+@basic_unit
+class UnarySqrt(nn.Module):
+    def forward(self, x):
+        return torch.sqrt(x)
+
+@basic_unit
 class UnaryMul(nn.Module):
     def __init__(self):
         super().__init__()
@@ -18,6 +56,7 @@ def __init__(self):
     def forward(self, x):
         return x * self.beta
 
+@basic_unit
 class UnaryAdd(nn.Module):
     def __init__(self):
         super().__init__()
@@ -26,95 +65,170 @@ def __init__(self):
     def forward(self, x):
         return x + self.beta
 
+@basic_unit
+class UnaryLogAbs(nn.Module):
+    def forward(self, x):
+        return torch.log(torch.abs(x) + 1e-7)
+
+@basic_unit
+class UnaryExp(nn.Module):
+    def forward(self, x):
+        return torch.exp(x)
+
+@basic_unit
+class UnarySin(nn.Module):
+    def forward(self, x):
+        return torch.sin(x)
+
+@basic_unit
+class UnaryCos(nn.Module):
+    def forward(self, x):
+        return torch.cos(x)
+
+@basic_unit
+class UnarySinh(nn.Module):
+    def forward(self, x):
+        return torch.sinh(x)
+
+@basic_unit
+class UnaryCosh(nn.Module):
+    def forward(self, x):
+        return torch.cosh(x)
+
+@basic_unit
+class UnaryTanh(nn.Module):
+    def forward(self, x):
+        return torch.tanh(x)
+
+if not version_larger_equal(torch.__version__, TorchVersion):
+    @basic_unit
+    class UnaryAsinh(nn.Module):
+        def forward(self, x):
+            return torch.asinh(x)
+
+@basic_unit
+class UnaryAtan(nn.Module):
+    def forward(self, x):
+        return torch.atan(x)
+
+if not version_larger_equal(torch.__version__, TorchVersion):
+    @basic_unit
+    class UnarySinc(nn.Module):
+        def forward(self, x):
+            return torch.sinc(x)
+
+@basic_unit
+class UnaryMax(nn.Module):
+    def forward(self, x):
+        return torch.max(x, torch.zeros_like(x))
+
+@basic_unit
+class UnaryMin(nn.Module):
+    def forward(self, x):
+        return torch.min(x, torch.zeros_like(x))
+
+@basic_unit
+class UnarySigmoid(nn.Module):
+    def forward(self, x):
+        return torch.sigmoid(x)
+
+@basic_unit
+class UnaryLogExp(nn.Module):
+    def forward(self, x):
+        return torch.log(1 + torch.exp(x))
+
+@basic_unit
+class UnaryExpSquare(nn.Module):
+    def forward(self, x):
+        return torch.exp(-torch.square(x))
+
+@basic_unit
+class UnaryErf(nn.Module):
+    def forward(self, x):
+        return torch.erf(x)
+
+unary_modules = ['UnaryIdentity', 'UnaryNegative', 'UnaryAbs', 'UnarySquare', 'UnaryPow',
+                 'UnarySqrt', 'UnaryMul', 'UnaryAdd', 'UnaryLogAbs', 'UnaryExp', 'UnarySin', 'UnaryCos',
+                 'UnarySinh', 'UnaryCosh', 'UnaryTanh', 'UnaryAtan', 'UnaryMax',
+                 'UnaryMin', 'UnarySigmoid', 'UnaryLogExp', 'UnaryExpSquare', 'UnaryErf']
+
+if not version_larger_equal(torch.__version__, TorchVersion):
+    unary_modules.append('UnaryAsinh')
+    unary_modules.append('UnarySinc')
+
+# ============== binary function modules ==============
+
+@basic_unit
+class BinaryAdd(nn.Module):
+    def forward(self, x):
+        return x[0] + x[1]
+
+@basic_unit
+class BinaryMul(nn.Module):
+    def forward(self, x):
+        return x[0] * x[1]
+
+@basic_unit
+class BinaryMinus(nn.Module):
+    def forward(self, x):
+        return x[0] - x[1]
+
+@basic_unit
+class BinaryDivide(nn.Module):
+    def forward(self, x):
+        return x[0] / (x[1] + 1e-7)
+
+@basic_unit
+class BinaryMax(nn.Module):
+    def forward(self, x):
+        return torch.max(x[0], x[1])
+
+@basic_unit
+class BinaryMin(nn.Module):
+    def forward(self, x):
+        return torch.min(x[0], x[1])
+
+@basic_unit
+class BinarySigmoid(nn.Module):
+    def forward(self, x):
+        return torch.sigmoid(x[0]) * x[1]
+
+@basic_unit
 class BinaryExpSquare(nn.Module):
     def __init__(self):
         super().__init__()
         self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
-    def forward(self, x, y):
-        return torch.exp(-self.beta * torch.square(x - y))
+    def forward(self, x):
+        return torch.exp(-self.beta * torch.square(x[0] - x[1]))
 
+@basic_unit
 class BinaryExpAbs(nn.Module):
     def __init__(self):
         super().__init__()
         self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
-    def forward(self, x, y):
-        return torch.exp(-self.beta * torch.abs(x - y))
+    def forward(self, x):
+        return torch.exp(-self.beta * torch.abs(x[0] - x[1]))
 
-class BinaryAdd(nn.Module):
+@basic_unit
+class BinaryParamAdd(nn.Module):
     def __init__(self):
         super().__init__()
         self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
-    def forward(self, x, y):
-        return self.beta * x + (1 - self.beta) * y
-
-autoact_unary_funcs = [
-    lambda x: x,
-    lambda x: -x,
-    lambda x: torch.abs(x),
-    lambda x: torch.square(x),
-    lambda x: torch.pow(x, 3),
-    lambda x: torch.pow(x, 0.5),
-    #UnaryMul(),
-    #UnaryAdd(),
-    lambda x: torch.log(torch.abs(x) + 1e-7),
-    lambda x: torch.exp(x),
-    lambda x: torch.sin(x),
-    lambda x: torch.cos(x),
-    lambda x: torch.sinh(x),
-    lambda x: torch.cosh(x),
-    lambda x: torch.tanh(x),
-    lambda x: torch.asinh(x),
-    lambda x: torch.atan(x),
-    lambda x: torch.sinc(x),
-    lambda x: torch.max(x, torch.zeros_like(x)),
-    lambda x: torch.min(x, torch.zeros_like(x)),
-    lambda x: torch.sigmoid(x),
-    lambda x: torch.log(1 + torch.exp(x)),
-    lambda x: torch.exp(-torch.square(x)),
-    lambda x: torch.erf(x)
-]
-
-autoact_binary_funcs = [
-    lambda x, y: x + y,
-    lambda x, y: x * y,
-    lambda x, y: x - y,
-    lambda x, y: x / (y + 1e-7),
-    lambda x, y: torch.max(x, y),
-    lambda x, y: torch.min(x, y),
-    lambda x, y: torch.sigmoid(x) * y,
-    #BinaryExpSquare(),
-    #BinaryExpAbs(),
-    #BinaryAdd()
-]
-
-class UnaryFunctionalModule(nn.Module):
-    def __init__(self, fn):
-        super().__init__()
-        self.fn = fn
     def forward(self, x):
-        return self.fn(x)
+        return self.beta * x[0] + (1 - self.beta) * x[1]
+
+binary_modules = ['BinaryAdd', 'BinaryMul', 'BinaryMinus', 'BinaryDivide', 'BinaryMax',
+                  'BinaryMin', 'BinarySigmoid', 'BinaryExpSquare', 'BinaryExpAbs', 'BinaryParamAdd']
 
-class BinaryFunctionalModule(nn.Module):
-    def __init__(self, fn):
-        super().__init__()
-        self.fn = fn
-    def forward(self, x, y):
-        return self.fn(x, y)
 
 class AutoActivation(nn.Module):
     """
     """
     def __init__(self, unit_num = 1):
         super().__init__()
-        unary1_cand = [UnaryFunctionalModule(fn) for fn in autoact_unary_funcs]
-        unary1_cand.extend([UnaryMul(), UnaryAdd()])
-        unary2_cand = [UnaryFunctionalModule(fn) for fn in autoact_unary_funcs]
-        unary2_cand.extend([UnaryMul(), UnaryAdd()])
-        binary_cand = [BinaryFunctionalModule(fn) for fn in autoact_binary_funcs]
-        binary_cand.extend([BinaryExpSquare(), BinaryExpAbs(), BinaryAdd()])
-        self.unary1 = LayerChoice(unary1_cand, label='one_unary')
-        self.unary2 = LayerChoice(unary2_cand, label='one_unary')
-        self.binary = LayerChoice(binary_cand)
-
-    def forward(self, x):
-        return self.binary(self.unary1(x), self.unary2(x))
+        self.unary1 = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+        self.unary2 = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+        self.binary = LayerChoice([eval('{}()'.format(binary)) for binary in binary_modules])
+
+    def forward(self, x):
+        return self.binary(torch.stack([self.unary1(x), self.unary2(x)]))
diff --git a/nni/retiarii/operation_def/torch_op_def.py b/nni/retiarii/operation_def/torch_op_def.py
index bb97069e63..f0cbd04e42 100644
--- a/nni/retiarii/operation_def/torch_op_def.py
+++ b/nni/retiarii/operation_def/torch_op_def.py
@@ -61,7 +61,7 @@ def to_forward_code(self, field: str, output: str, inputs: List[str], inputs_val
         # TODO: deal with all the types
         if self.parameters['type'] == 'None':
             return f'{output} = None'
-        elif self.parameters['type'] in ('int', 'float', 'bool', 'int[]'):
+        elif self.parameters['type'] in ('int', 'float', 'bool', 'int[]'): # 'Long()' ???
             return f'{output} = {self.parameters["value"]}'
         elif self.parameters['type'] == 'str':
             str_val = self.parameters["value"]
@@ -171,7 +171,7 @@ class AtenTensors(PyTorchOperation):
                  'aten::ones_like', 'aten::zeros_like', 'aten::rand', 'aten::randn',
                  'aten::scalar_tensor', 'aten::new_full', 'aten::new_empty', 'aten::new_zeros',
                  'aten::arange',
-                 'aten::tensor', 'aten::ones', 'aten::zeros']
+                 'aten::tensor', 'aten::ones', 'aten::zeros', 'aten::as_tensor']
 
     def to_forward_code(self, field: str, output: str, inputs: List[str], inputs_value: List[Any] = None) -> str:
         schemas = torch._C._jit_get_schemas_for_operator(self.type)

From 4e42cdfd8c7dcc3f2dce5a98d015cce1b9760d19 Mon Sep 17 00:00:00 2001
From: quzha
Date: Mon, 28 Jun 2021 20:58:36 +0800
Subject: [PATCH 3/8] support various number of core units

---
 nni/retiarii/nn/pytorch/hypermodule.py | 22 ++++++++++++++++------
 1 file changed, 16 insertions(+), 6 deletions(-)

diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
index 99814bc29d..27ca8601b0 100644
--- a/nni/retiarii/nn/pytorch/hypermodule.py
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -226,9 +226,19 @@ class AutoActivation(nn.Module):
     """
     def __init__(self, unit_num = 1):
         super().__init__()
-        self.unary1 = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
-        self.unary2 = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
-        self.binary = LayerChoice([eval('{}()'.format(binary)) for binary in binary_modules])
-
-    def forward(self, x):
-        return self.binary(torch.stack([self.unary1(x), self.unary2(x)]))
+        self.unit_num = unit_num
+        self.unaries = nn.ModuleList()
+        self.binaries = nn.ModuleList()
+        self.first_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+        for i in range(unit_num):
+            one_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+            self.unaries.append(one_unary)
+        for i in range(unit_num):
+            one_binary = LayerChoice([eval('{}()'.format(binary)) for binary in binary_modules])
+            self.binaries.append(one_binary)
+
+    def forward(self, x):
+        out = self.first_unary(x)
+        for unary, binary in zip(self.unaries, self.binaries):
+            out = binary(torch.stack([out, unary(x)]))
+        return out

From ca8860448247a59d2b4461eadcfb61df5aec2799 Mon Sep 17 00:00:00 2001
From: quzha
Date: Mon, 28 Jun 2021 21:51:41 +0800
Subject: [PATCH 4/8] add doc docstring

---
 docs/en_US/NAS/Hypermodules.rst        | 9 +++++++++
 docs/en_US/NAS/construct_space.rst     | 3 ++-
 nni/retiarii/nn/pytorch/hypermodule.py | 9 ++++++++-
 3 files changed, 19 insertions(+), 2 deletions(-)
 create mode 100644 docs/en_US/NAS/Hypermodules.rst

diff --git a/docs/en_US/NAS/Hypermodules.rst b/docs/en_US/NAS/Hypermodules.rst
new file mode 100644
index 0000000000..e87bf34725
--- /dev/null
+++ b/docs/en_US/NAS/Hypermodules.rst
@@ -0,0 +1,9 @@
+Hypermodules
+============
+
+A hypermodule is a (PyTorch) module that contains many architecture/hyperparameter candidates for the module. By using hypermodules in a user-defined model, NNI automatically finds the best architecture/hyperparameters of the hypermodules for this model. This follows the design philosophy of Retiarii that users write a DNN model as a space.
+
+Several hypermodules have been proposed in the NAS community, such as AutoActivation and AutoDropout. Some of them are implemented in the Retiarii framework.
+
+.. autoclass:: nni.retiarii.nn.pytorch.AutoActivation
+    :members:
\ No newline at end of file
diff --git a/docs/en_US/NAS/construct_space.rst b/docs/en_US/NAS/construct_space.rst
index b32489d4a7..362bb446ea 100644
--- a/docs/en_US/NAS/construct_space.rst
+++ b/docs/en_US/NAS/construct_space.rst
@@ -8,4 +8,5 @@ NNI provides powerful APIs for users to easily express model space (or search sp
    :maxdepth: 1
 
    Mutation Primitives
-   Customize Mutators
\ No newline at end of file
+   Customize Mutators
+   Hypermodule Lib
\ No newline at end of file
diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
index 27ca8601b0..d424b03ad3 100644
--- a/nni/retiarii/nn/pytorch/hypermodule.py
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -223,10 +223,17 @@ def forward(self, x):
 
 class AutoActivation(nn.Module):
     """
+    This module is an implementation of the paper "Searching for Activation Functions"
+    (https://arxiv.org/abs/1710.05941).
+    NOTE: currently `beta` is not a per-channel parameter
+
+    Parameters
+    ----------
+    unit_num : int
+        the number of core units
     """
     def __init__(self, unit_num = 1):
         super().__init__()
-        self.unit_num = unit_num
         self.unaries = nn.ModuleList()
         self.binaries = nn.ModuleList()
         self.first_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')

From 96bde5b7925562050f875587db8b488ae5450f9c Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 29 Jun 2021 13:50:38 +0800
Subject: [PATCH 5/8] fix pylint

---
 nni/retiarii/nn/pytorch/hypermodule.py | 16 +++++++---------
 1 file changed, 7 insertions(+), 9 deletions(-)

diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
index d424b03ad3..d1db4ef22c 100644
--- a/nni/retiarii/nn/pytorch/hypermodule.py
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -1,8 +1,6 @@
 # Copyright (c) Microsoft Corporation.
 # Licensed under the MIT license.
 
-import math
-
 import torch
 import torch.nn as nn
 
@@ -52,7 +50,7 @@ class UnaryMul(nn.Module):
     def __init__(self):
         super().__init__()
         # element-wise for now, will change to per-channel trainable parameter
-        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32)) # pylint: disable=not-callable
     def forward(self, x):
         return x * self.beta
 
@@ -61,7 +59,7 @@ def __init__(self):
         super().__init__()
         # element-wise for now, will change to per-channel trainable parameter
-        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32)) # pylint: disable=not-callable
     def forward(self, x):
         return x + self.beta
 
@@ -197,7 +195,7 @@ def forward(self, x):
 class BinaryExpSquare(nn.Module):
     def __init__(self):
         super().__init__()
-        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32)) # pylint: disable=not-callable
     def forward(self, x):
         return torch.exp(-self.beta * torch.square(x[0] - x[1]))
 
@@ -205,7 +203,7 @@ def forward(self, x):
 class BinaryExpAbs(nn.Module):
     def __init__(self):
         super().__init__()
-        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32)) # pylint: disable=not-callable
     def forward(self, x):
         return torch.exp(-self.beta * torch.abs(x[0] - x[1]))
 
@@ -213,7 +211,7 @@ def forward(self, x):
 class BinaryParamAdd(nn.Module):
     def __init__(self):
         super().__init__()
-        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32))
+        self.beta = torch.nn.Parameter(torch.tensor(1, dtype=torch.float32)) # pylint: disable=not-callable
     def forward(self, x):
         return self.beta * x[0] + (1 - self.beta) * x[1]
 
@@ -237,10 +235,10 @@ def __init__(self, unit_num = 1):
         self.unaries = nn.ModuleList()
         self.binaries = nn.ModuleList()
         self.first_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
-        for i in range(unit_num):
+        for _ in range(unit_num):
             one_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
             self.unaries.append(one_unary)
-        for i in range(unit_num):
+        for _ in range(unit_num):
             one_binary = LayerChoice([eval('{}()'.format(binary)) for binary in binary_modules])
             self.binaries.append(one_binary)
 

From f081b8ea0495179295936e5780fe075176f51359 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 13 Jul 2021 16:35:20 +0800
Subject: [PATCH 6/8] add ut for autoactivation

---
 nni/retiarii/nn/pytorch/hypermodule.py  |  4 ++--
 test/ut/retiarii/test_highlevel_apis.py | 18 ++++++++++++++++++
 2 files changed, 20 insertions(+), 2 deletions(-)

diff --git a/nni/retiarii/nn/pytorch/hypermodule.py b/nni/retiarii/nn/pytorch/hypermodule.py
index d1db4ef22c..86cd00ea93 100644
--- a/nni/retiarii/nn/pytorch/hypermodule.py
+++ b/nni/retiarii/nn/pytorch/hypermodule.py
@@ -234,9 +234,9 @@ def __init__(self, unit_num = 1):
         super().__init__()
         self.unaries = nn.ModuleList()
         self.binaries = nn.ModuleList()
-        self.first_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+        self.first_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules])
         for _ in range(unit_num):
-            one_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules], label='one_unary')
+            one_unary = LayerChoice([eval('{}()'.format(unary)) for unary in unary_modules])
             self.unaries.append(one_unary)
         for _ in range(unit_num):
             one_binary = LayerChoice([eval('{}()'.format(binary)) for binary in binary_modules])
diff --git a/test/ut/retiarii/test_highlevel_apis.py b/test/ut/retiarii/test_highlevel_apis.py
index db24f7ec3c..d32fdac05d 100644
--- a/test/ut/retiarii/test_highlevel_apis.py
+++ b/test/ut/retiarii/test_highlevel_apis.py
@@ -544,3 +544,21 @@ def forward(self, x):
                 except InvalidMutation:
                     continue
             self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(2, 10)).size() == torch.Size([2, 16]))
+
+    def test_autoactivation(self):
+        @self.get_serializer()
+        class Net(nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.act = nn.AutoActivation()
+
+            def forward(self, x):
+                return self.act(x)
+
+        raw_model, mutators = self._get_model_with_mutators(Net())
+        for _ in range(10):
+            sampler = EnumerateSampler()
+            model = raw_model
+            for mutator in mutators:
+                model = mutator.bind_sampler(sampler).apply(model)
+            self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(2, 10)).size() == torch.Size([2, 10]))
\ No newline at end of file

From 185b8ca6f47cb670ae3fedfa3a1318f9cf6055a1 Mon Sep 17 00:00:00 2001
From: quzha
Date: Tue, 13 Jul 2021 16:52:25 +0800
Subject: [PATCH 7/8] add one more test for base engine

---
 test/ut/retiarii/test_highlevel_apis.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/test/ut/retiarii/test_highlevel_apis.py b/test/ut/retiarii/test_highlevel_apis.py
index d32fdac05d..31ca79261b 100644
--- a/test/ut/retiarii/test_highlevel_apis.py
+++ b/test/ut/retiarii/test_highlevel_apis.py
@@ -493,6 +493,23 @@ def forward(self, x):
                 model = mutator.bind_sampler(sampler).apply(model)
             self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(1, 16)).size() == torch.Size([1, 64]))
 
+    def test_autoactivation(self):
+        class Net(nn.Module):
+            def __init__(self):
+                super().__init__()
+                self.act = nn.AutoActivation()
+
+            def forward(self, x):
+                return self.act(x)
+
+        raw_model, mutators = self._get_model_with_mutators(Net())
+        for _ in range(10):
+            sampler = EnumerateSampler()
+            model = raw_model
+            for mutator in mutators:
+                model = mutator.bind_sampler(sampler).apply(model)
+            self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(2, 10)).size() == torch.Size([2, 10]))
+
 
 class Python(GraphIR):
     def _get_converted_pytorch_model(self, model_ir):

From 5d8fe77bfc57daad7a09ad291aa753fc6e113d68 Mon Sep 17 00:00:00 2001
From: quzha
Date: Wed, 14 Jul 2021 19:15:02 +0800
Subject: [PATCH 8/8] remove redundant test

---
 test/ut/retiarii/test_highlevel_apis.py | 19 +------------------
 1 file changed, 1 insertion(+), 18 deletions(-)

diff --git a/test/ut/retiarii/test_highlevel_apis.py b/test/ut/retiarii/test_highlevel_apis.py
index 31ca79261b..8ee2a2699d 100644
--- a/test/ut/retiarii/test_highlevel_apis.py
+++ b/test/ut/retiarii/test_highlevel_apis.py
@@ -494,6 +494,7 @@ def forward(self, x):
             self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(1, 16)).size() == torch.Size([1, 64]))
 
     def test_autoactivation(self):
+        @self.get_serializer()
         class Net(nn.Module):
             def __init__(self):
                 super().__init__()
@@ -561,21 +562,3 @@ def forward(self, x):
                 except InvalidMutation:
                     continue
             self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(2, 10)).size() == torch.Size([2, 16]))
-
-    def test_autoactivation(self):
-        @self.get_serializer()
-        class Net(nn.Module):
-            def __init__(self):
-                super().__init__()
-                self.act = nn.AutoActivation()
-
-            def forward(self, x):
-                return self.act(x)
-
-        raw_model, mutators = self._get_model_with_mutators(Net())
-        for _ in range(10):
-            sampler = EnumerateSampler()
-            model = raw_model
-            for mutator in mutators:
-                model = mutator.bind_sampler(sampler).apply(model)
-            self.assertTrue(self._get_converted_pytorch_model(model)(torch.randn(2, 10)).size() == torch.Size([2, 10]))
\ No newline at end of file
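
A minimal usage sketch of the AutoActivation hypermodule introduced by this patch series, mirroring the unit test added in the final patches. The `import nni.retiarii.nn.pytorch as nn` alias and the call to `AutoActivation()` with its default single core unit are assumptions based on the test code above; the experiment/strategy setup that actually drives the search is omitted.

    import torch
    import nni.retiarii.nn.pytorch as nn

    class Net(nn.Module):
        def __init__(self):
            super().__init__()
            # Searchable activation: LayerChoices over the unary/binary candidate ops,
            # stacked unit_num times (default 1).
            self.act = nn.AutoActivation()

        def forward(self, x):
            return self.act(x)

As in the tests, Retiarii mutators then sample one candidate per LayerChoice and the converted model is run on concrete inputs; since the module applies only element-wise unary/binary operations, an input of shape (2, 10) yields an output of the same shape.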