shape_result.txt
Total sparse weights: 74.539 (12815247/17192650)
Sparse weight by layer
Layer: conv1_1.weight, 0.47511574074074076 (821/1728)
Layer: conv2_1.weight, 0.533203125 (39312/73728)
Layer: conv3_1.weight, 0.6095547146267362 (179765/294912)
Layer: fc1.weight, 0.7493541836738586 (12572077/16777216)
Layer: fc1.bias, 0.676513671875 (2771/4096)
Layer: fc2.weight, 0.500390625 (20496/40960)
Layer: fc2.bias, 0.5 (5/10)
Sparse weight by filter
Layer: conv1_1.weight, [14, 12, 11, 15, 11, 17, 11, 12, 12, 8, 13, 12, 10, 14, 12, 12, 13, 15, 15, 11, 15, 9, 11, 12, 15, 13, 12, 12, 13, 14, 11, 9, 12, 13, 14, 12, 27, 12, 17, 13, 14, 12, 9, 12, 12, 9, 14, 16, 7, 13, 12, 14, 15, 11, 10, 12, 9, 14, 12, 15, 11, 27, 13, 12]
Layer: conv2_1.weight, [245, 207, 337, 340, 286, 263, 348, 221, 302, 289, 266, 576, 268, 343, 256, 268, 237, 212, 422, 404, 397, 310, 211, 576, 295, 358, 363, 364, 322, 272, 303, 322, 316, 576, 335, 266, 264, 373, 268, 234, 405, 386, 336, 264, 270, 261, 291, 312, 257, 293, 294, 270, 328, 355, 272, 269, 322, 234, 274, 245, 334, 230, 297, 257, 256, 278, 278, 357, 302, 396, 243, 281, 324, 244, 292, 304, 303, 272, 375, 293, 271, 276, 282, 313, 316, 410, 576, 576, 274, 249, 233, 227, 233, 310, 299, 257, 234, 375, 363, 284, 369, 297, 272, 244, 350, 295, 329, 312, 382, 252, 186, 307, 290, 398, 305, 243, 317, 402, 271, 256, 265, 380, 308, 250, 228, 251, 239, 287]
Layer: conv3_1.weight, [614, 692, 613, 576, 616, 628, 667, 634, 698, 582, 622, 623, 602, 610, 613, 626, 610, 567, 639, 616, 654, 637, 608, 587, 1135, 1152, 621, 612, 661, 611, 1152, 627, 625, 685, 645, 620, 620, 576, 628, 617, 675, 608, 561, 645, 570, 593, 1152, 565, 618, 630, 587, 1152, 1089, 627, 628, 658, 685, 1131, 624, 676, 606, 1152, 597, 1152, 622, 1089, 1129, 644, 619, 632, 586, 623, 606, 708, 576, 616, 565, 646, 642, 646, 616, 604, 1152, 1152, 630, 603, 1138, 616, 685, 623, 593, 584, 635, 594, 645, 646, 637, 618, 658, 693, 619, 583, 703, 609, 607, 1097, 593, 647, 598, 609, 643, 665, 1152, 1152, 635, 619, 656, 628, 586, 610, 578, 631, 620, 638, 658, 610, 578, 676, 674, 1152, 695, 629, 610, 638, 594, 652, 1152, 668, 595, 577, 628, 632, 594, 629, 625, 1152, 585, 603, 730, 599, 609, 581, 1152, 610, 601, 644, 1152, 584, 1152, 587, 599, 1152, 592, 638, 606, 631, 586, 673, 620, 1152, 1152, 625, 644, 1152, 631, 615, 607, 631, 566, 599, 653, 577, 591, 633, 670, 638, 624, 573, 641, 608, 612, 616, 646, 593, 647, 624, 1123, 598, 603, 615, 616, 1152, 613, 1148, 1134, 596, 621, 597, 634, 594, 680, 727, 626, 585, 697, 1152, 553, 592, 613, 613, 1152, 1152, 616, 628, 1152, 611, 601, 612, 1152, 610, 605, 685, 607, 623, 1144, 1152, 572, 632, 600, 632, 653, 588, 1152, 628, 675, 615, 598, 626, 643, 649, 723, 674, 601, 607, 595, 643]
Layer: fc1.weight, None
Layer: fc1.bias, None
Layer: fc2.weight, None
Layer: fc2.bias, None
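Note: the per-layer figures above are the fraction of zero-valued entries in each parameter tensor (e.g. conv1_1.weight: 821 zeros out of 1728 weights = 0.475), the per-filter lists count zeros in each convolutional output filter (None for non-conv parameters), and the total is given as a percentage. The following is a minimal sketch of how such a report could be produced for a PyTorch model; the function name and loop structure are illustrative assumptions, not the project's actual code.

    import torch

    def sparsity_report(model):
        # Fraction of zero-valued entries per parameter tensor, plus an overall total.
        zeros = {n: int((p == 0).sum()) for n, p in model.named_parameters()}
        sizes = {n: p.numel() for n, p in model.named_parameters()}
        total_zeros, total = sum(zeros.values()), sum(sizes.values())
        print(f"Total sparse weights: {100.0 * total_zeros / total:.3f} ({total_zeros}/{total})")
        print("Sparse weight by layer")
        for n in zeros:
            print(f"Layer: {n}, {zeros[n] / sizes[n]} ({zeros[n]}/{sizes[n]})")
        print("Sparse weight by filter")
        for n, p in model.named_parameters():
            # Per-output-filter zero counts only make sense for 4-D conv weights.
            per_filter = (p == 0).reshape(p.size(0), -1).sum(dim=1).tolist() if p.dim() == 4 else None
            print(f"Layer: {n}, {per_filter}")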
========== epoch 0
Train: Loss: 8.036, Acc: 68.242 (34121/50000)
Train Time: 131.783
Test: Loss: 1.809, Acc: 67.682 (33841/50000)
Test Time: 7.685
========== epoch 1
Train: Loss: 6.771, Acc: 67.680 (33840/50000)
Train Time: 132.052
Test: Loss: 1.821, Acc: 67.504 (33752/50000)
Test Time: 7.698
========== epoch 2
Train: Loss: 5.960, Acc: 66.878 (33439/50000)
Train Time: 131.590
Test: Loss: 1.836, Acc: 67.126 (33563/50000)
Test Time: 7.753
========== epoch 3
Train: Loss: 5.372, Acc: 66.158 (33079/50000)
Train Time: 131.751
Test: Loss: 1.855, Acc: 65.642 (32821/50000)
Test Time: 7.695
========== epoch 4
Train: Loss: 4.924, Acc: 65.320 (32660/50000)
Train Time: 131.475
Test: Loss: 1.872, Acc: 64.474 (32237/50000)
Test Time: 7.697
========== epoch 5
Train: Loss: 4.571, Acc: 64.140 (32070/50000)
Train Time: 131.704
Test: Loss: 1.891, Acc: 63.292 (31646/50000)
Test Time: 7.681
========== epoch 6
Train: Loss: 4.287, Acc: 63.242 (31621/50000)
Train Time: 131.587
Test: Loss: 1.904, Acc: 61.790 (30895/50000)
Test Time: 7.690
========== epoch 7
Train: Loss: 4.058, Acc: 62.016 (31008/50000)
Train Time: 131.575
Test: Loss: 1.913, Acc: 61.814 (30907/50000)
Test Time: 7.749
========== epoch 8
Train: Loss: 3.867, Acc: 61.110 (30555/50000)
Train Time: 131.480
Test: Loss: 1.926, Acc: 60.328 (30164/50000)
Test Time: 7.685
========== epoch 9
Train: Loss: 3.709, Acc: 60.446 (30223/50000)
Train Time: 131.318
Test: Loss: 1.936, Acc: 60.148 (30074/50000)
Test Time: 7.746
Best Training Accuracy: 68.242%
Best Test Accuracy: 67.682%
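In the epoch log above, accuracy is reported as 100 * correct / total over the 50000 examples (e.g. 34121/50000 = 68.242%). A generic sketch of an evaluation pass producing numbers in that form is shown below; it is an assumption about how the logged values are computed, not the repository's code, and the logged training loss may include additional terms beyond the plain criterion computed here.

    import torch

    @torch.no_grad()
    def evaluate(model, loader, criterion, device="cuda"):
        # Accumulate loss and correct predictions, then report Acc = 100 * correct / total.
        model.eval()
        loss_sum, correct, total = 0.0, 0, 0
        for x, y in loader:
            x, y = x.to(device), y.to(device)
            out = model(x)
            loss_sum += criterion(out, y).item() * y.size(0)
            correct += (out.argmax(dim=1) == y).sum().item()
            total += y.size(0)
        return loss_sum / total, 100.0 * correct / total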
Total sparse weights: 71.448 (12283729/17192650)
Sparse weight by layer
Layer: conv1_1.weight, 0.4739583333333333 (819/1728)
Layer: conv2_1.weight, 0.5162082248263888 (38059/73728)
Layer: conv3_1.weight, 0.6270650227864584 (184929/294912)
Layer: fc1.weight, 0.7175077795982361 (12037783/16777216)
Layer: fc1.bias, 0.662109375 (2712/4096)
Layer: fc2.weight, 0.474169921875 (19422/40960)
Layer: fc2.bias, 0.5 (5/10)
Sparse weight by filter
Layer: conv1_1.weight, [14, 13, 10, 15, 10, 17, 11, 12, 12, 8, 13, 12, 10, 14, 12, 12, 13, 15, 16, 11, 15, 9, 11, 12, 15, 13, 12, 12, 13, 14, 11, 9, 12, 13, 14, 12, 27, 12, 17, 13, 14, 12, 9, 12, 10, 8, 14, 16, 7, 13, 12, 14, 15, 12, 10, 12, 9, 14, 12, 15, 11, 27, 13, 12]
Layer: conv2_1.weight, [207, 184, 361, 359, 278, 264, 386, 179, 303, 291, 247, 576, 266, 358, 217, 254, 197, 148, 392, 429, 373, 297, 173, 576, 275, 369, 375, 363, 345, 284, 309, 316, 323, 576, 326, 253, 264, 409, 261, 162, 340, 418, 372, 255, 245, 233, 277, 303, 251, 321, 287, 256, 345, 366, 272, 212, 320, 191, 263, 206, 344, 211, 312, 250, 230, 268, 263, 374, 316, 359, 202, 281, 345, 204, 285, 305, 304, 245, 395, 262, 268, 266, 262, 308, 305, 434, 576, 576, 255, 206, 188, 169, 192, 332, 304, 210, 221, 392, 390, 290, 378, 261, 250, 236, 365, 299, 347, 340, 403, 207, 129, 317, 286, 421, 273, 216, 328, 439, 263, 224, 218, 311, 291, 238, 166, 203, 225, 268]
Layer: conv3_1.weight, [578, 680, 694, 576, 568, 649, 747, 666, 743, 563, 622, 637, 626, 585, 599, 689, 617, 518, 670, 657, 606, 644, 648, 559, 1152, 1152, 632, 596, 731, 596, 1152, 653, 605, 777, 567, 625, 638, 490, 547, 513, 692, 660, 531, 597, 532, 553, 1152, 576, 626, 565, 602, 1152, 1126, 665, 649, 794, 1152, 57, 636, 701, 607, 1152, 613, 1152, 544, 906, 1152, 701, 593, 656, 558, 579, 559, 841, 544, 615, 588, 1152, 639, 670, 642, 627, 1152, 1152, 645, 575, 1152, 606, 725, 659, 649, 567, 662, 617, 640, 706, 718, 624, 686, 812, 568, 518, 1150, 549, 592, 1152, 636, 633, 552, 683, 650, 622, 1152, 1152, 700, 733, 598, 681, 537, 601, 531, 1150, 609, 655, 701, 606, 588, 950, 738, 1152, 771, 635, 619, 655, 602, 746, 1152, 592, 554, 547, 642, 669, 619, 587, 616, 1152, 588, 597, 795, 616, 669, 598, 1152, 596, 659, 654, 1152, 602, 1152, 571, 622, 1152, 625, 687, 608, 700, 591, 755, 636, 1152, 1152, 538, 743, 1152, 666, 684, 592, 670, 570, 606, 694, 564, 581, 640, 857, 594, 639, 588, 719, 667, 687, 630, 696, 586, 688, 673, 1152, 591, 648, 604, 630, 1152, 659, 1152, 1152, 610, 663, 579, 676, 702, 737, 1144, 594, 575, 736, 1152, 553, 608, 1152, 627, 1152, 1152, 587, 600, 1152, 661, 600, 635, 1152, 631, 599, 747, 599, 645, 1152, 1152, 519, 661, 602, 603, 770, 544, 1152, 673, 672, 645, 607, 579, 642, 1142, 741, 729, 626, 617, 600, 617]
Layer: fc1.weight, None
Layer: fc1.bias, None
Layer: fc2.weight, None
Layer: fc2.bias, None
Test: Loss: 1.935, Acc: 60.172 (30086/50000)
Final test accuracy: 60.172%