Skip to content

Commit

Permalink
Commit with webpage homepage setup.
Browse files Browse the repository at this point in the history
  • Loading branch information
jwzhanggy committed Jul 6, 2024
1 parent 8de0d82 commit 2b8db2b
Show file tree
Hide file tree
Showing 5 changed files with 51 additions and 5 deletions.
37 changes: 37 additions & 0 deletions examples/image/mnist.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"id": "initial_id",
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
""
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.6"
}
},
"nbformat": 4,
"nbformat_minor": 5
}
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from setuptools import setup, find_packages

__version__ = '0.1.0.post7'
__version__ = '0.1.0.post8'

requirements = [
"pip>=23.3",
Expand Down
11 changes: 10 additions & 1 deletion tests/unit_tests/test_config.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import pytest
#import pytest
import torch
from tinybig.config import rpn_config
from tinybig.util import set_random_seed
from tinybig.learner import backward_learner

config_obj = rpn_config(name='test_rpn_config.yaml')
config = config_obj.load_yaml(cache_dir='./configs', config_file='test_rpn_config.yaml')
Expand All @@ -20,6 +22,13 @@
data_obj, model_obj, learner_obj, metric_obj, result_obj = [object_dict[name] for name in
['data', 'model', 'learner', 'metric',
'result']]

optimizer = torch.optim.AdamW(lr=0.001, params=model_obj.parameters())

learner_obj = backward_learner(n_epochs=100, optimizer=optimizer,
loss=torch.nn.CrossEntropyLoss(),
lr_scheduler=torch.optim.lr_scheduler.ExponentialLR(gamma=0.99, optimizer=optimizer))

print('parameter num: ', sum([parameter.numel() for parameter in model_obj.parameters()]))
# ---- object initialization section -----

Expand Down
2 changes: 1 addition & 1 deletion tinybig/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@
- TBD
"""

__version__ = '0.1.0.post7'
__version__ = '0.1.0.post8'

from . import model, module, config
from . import remainder, expansion, compression, reconciliation
Expand Down
4 changes: 2 additions & 2 deletions tinybig/learner/backward_learner.py
Original file line number Diff line number Diff line change
Expand Up @@ -186,14 +186,14 @@ def train(
optimizer = get_obj_from_str(self.optimizer)(**self.optimizer_parameters)
else:
assert self.optimizer is not None
optimizer = self.optimizer(params=model.parameters())
optimizer = self.optimizer

if type(self.lr_scheduler) is str:
self.lr_scheduler_parameters['optimizer'] = optimizer
lr_scheduler = get_obj_from_str(self.lr_scheduler)(**self.lr_scheduler_parameters)
else:
if self.lr_scheduler is not None:
lr_scheduler = self.optimizer(optimizer=optimizer)
lr_scheduler = self.lr_scheduler
else:
lr_scheduler = None
# ----------------------------
Expand Down

0 comments on commit 2b8db2b

Please sign in to comment.