Fix v2 level pruner default config bug (#4245)
Fiascolsy authored Oct 13, 2021
1 parent c9cd53a commit abb4dfd
Showing 2 changed files with 15 additions and 11 deletions.
nni/algorithms/compression/v2/pytorch/base/compressor.py (10 changes: 1 addition & 9 deletions)
@@ -9,7 +9,7 @@
 from torch.nn import Module
 
 from nni.common.graph_utils import TorchModuleGraph
-from nni.algorithms.compression.v2.pytorch.utils import get_module_by_name
+from nni.algorithms.compression.v2.pytorch.utils.pruning import get_module_by_name, weighted_modules
 
 _logger = logging.getLogger(__name__)
 
@@ -32,14 +32,6 @@ def _setattr(model: Module, name: str, module: Module):
         raise '{} not exist.'.format(name)
 
 
-weighted_modules = [
-    'Conv1d', 'Conv2d', 'Conv3d', 'ConvTranspose1d', 'ConvTranspose2d', 'ConvTranspose3d',
-    'Linear', 'Bilinear',
-    'PReLU',
-    'Embedding', 'EmbeddingBag',
-]
-
-
 class Compressor:
     """
     The abstract base pytorch compressor.
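With this commit, compressor.py stops carrying its own copy of weighted_modules and imports it, together with get_module_by_name, from the pruning utilities, so the list has a single definition. A minimal sketch of the resulting import, taken directly from the added line above:

    from nni.algorithms.compression.v2.pytorch.utils.pruning import (
        get_module_by_name,
        weighted_modules,
    )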
nni/algorithms/compression/v2/pytorch/utils/pruning.py (16 changes: 14 additions & 2 deletions)
@@ -8,6 +8,13 @@
 from torch import Tensor
 from torch.nn import Module
 
+weighted_modules = [
+    'Conv1d', 'Conv2d', 'Conv3d', 'ConvTranspose1d', 'ConvTranspose2d', 'ConvTranspose3d',
+    'Linear', 'Bilinear',
+    'PReLU',
+    'Embedding', 'EmbeddingBag',
+]
+
 
 def config_list_canonical(model: Module, config_list: List[Dict]) -> List[Dict]:
     '''
@@ -37,6 +44,12 @@ def config_list_canonical(model: Module, config_list: List[Dict]) -> List[Dict]:
         else:
             config['sparsity_per_layer'] = config.pop('sparsity')
 
+    for config in config_list:
+        if 'op_types' in config:
+            if 'default' in config['op_types']:
+                config['op_types'].remove('default')
+                config['op_types'].extend(weighted_modules)
+
     for config in config_list:
         if 'op_partial_names' in config:
             op_names = []
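The loop added above is the actual bug fix: before it, an op_types entry containing the placeholder 'default' passed through config_list_canonical unexpanded, which is the v2 level pruner default-config bug named in the commit title. A self-contained sketch (plain Python, not the NNI source) of what the new step does to such a config:

    # Replace the 'default' placeholder with the concrete weighted module types.
    weighted_modules = [
        'Conv1d', 'Conv2d', 'Conv3d', 'ConvTranspose1d', 'ConvTranspose2d', 'ConvTranspose3d',
        'Linear', 'Bilinear',
        'PReLU',
        'Embedding', 'EmbeddingBag',
    ]

    # A typical level-pruner config entry that relies on the default op type.
    config_list = [{'sparsity_per_layer': 0.5, 'op_types': ['default']}]

    for config in config_list:
        if 'op_types' in config and 'default' in config['op_types']:
            config['op_types'].remove('default')
            config['op_types'].extend(weighted_modules)

    print(config_list[0]['op_types'])
    # ['Conv1d', 'Conv2d', 'Conv3d', ..., 'Embedding', 'EmbeddingBag']

Note that the expansion mutates the config entry in place, matching how the surrounding loops in config_list_canonical rewrite other keys such as 'sparsity'.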
@@ -225,18 +238,17 @@ def get_model_weights_numel(model: Module, config_list: List[Dict], masks: Dict[
         model_weights_numel[module_name] = module.weight.data.numel()
     return model_weights_numel, masked_rate
 
 
+# FIXME: to avoid circular import, copy this function in this place
 def get_module_by_name(model, module_name):
     """
     Get a module specified by its module name
-
     Parameters
     ----------
     model : pytorch model
         the pytorch model from which to get its module
     module_name : str
         the name of the required module
-
     Returns
     -------
     module, module
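The body of get_module_by_name is collapsed in this view. Per its docstring, it resolves a dotted module name to the target module and its parent; a plausible sketch consistent with that contract (an assumed implementation, not the verbatim NNI body):

    import torch.nn as nn

    def get_module_by_name(model, module_name):
        # Walk the dotted path; return (parent_module, target_module).
        name_list = module_name.split('.')
        for name in name_list[:-1]:
            model = getattr(model, name)
        return model, getattr(model, name_list[-1])

    # Usage: '1' names the second child of this Sequential container.
    net = nn.Sequential(nn.Conv2d(3, 8, 3), nn.Linear(8, 2))
    parent, target = get_module_by_name(net, '1')
    print(type(target).__name__)  # Linear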
