Skip to content
This repository has been archived by the owner on Sep 18, 2024. It is now read-only.

Commit

Permalink
Fix FLOPs counter bug in auto_pruners_torch.py: unpack the third return value of count_flops_params
Browse files Browse the repository at this point in the history
  • Loading branch information
linbinskn committed Jan 5, 2021
1 parent 05534f3 commit 5f9ea2f
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions examples/model_compress/auto_pruners_torch.py
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ def evaluator(model):
# used to save the performance of the original & pruned & finetuned models
result = {'flops': {}, 'params': {}, 'performance':{}}

flops, params = count_flops_params(model, get_input_size(args.dataset))
flops, params, _ = count_flops_params(model, get_input_size(args.dataset))
result['flops']['original'] = flops
result['params']['original'] = params

Expand Down Expand Up @@ -337,7 +337,7 @@ def evaluator(model):

torch.save(model.state_dict(), os.path.join(args.experiment_data_dir, 'model_speed_up.pth'))
print('Speed up model saved to %s', args.experiment_data_dir)
flops, params = count_flops_params(model, get_input_size(args.dataset))
flops, params, _ = count_flops_params(model, get_input_size(args.dataset))
result['flops']['speedup'] = flops
result['params']['speedup'] = params

Expand Down

0 comments on commit 5f9ea2f

Please sign in to comment.