Initial commit (#2670)
Signed-off-by: Kim, Vinnam <vinnam.kim@intel.com>
vinnamkim authored Nov 30, 2023
1 parent 291b716 commit e538298
Showing 106 changed files with 3,946 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/labeler.yaml
@@ -3,7 +3,7 @@ on:
  - pull_request_target

jobs:
  triage:
  labeler:
    permissions:
      contents: read
      pull-requests: write
3 changes: 3 additions & 0 deletions NOTICE
@@ -0,0 +1,3 @@
This repository is under the Apache License Version 2.0. However, the following files are borrowed from external sources under different licenses.

- MIT License: `src/otx/core/engine/**/*.py`
38 changes: 38 additions & 0 deletions for_developers/dir_structure.md
@@ -0,0 +1,38 @@
```console
root/
    algo/                   # Custom algo (e.g., hierarchical_cls_head)
    cli/                    # CLI entrypoints
    config/                 # Default YAML config files
    core/
        config/             # Structured data type object for configurations
        data/               # Data related things
            dataset/        # OTXDataset
                base.py
                detection.py
                ...
            entity/         # OTXDataEntity
                base.py
                detection.py
                ...
            transform_libs/ # To support transform libraries (e.g., MMCV)
            factory.py      # Factory to instantiate data related objects
            module.py       # OTXDataModule
        engine/             # PyTorchLightning engine
            train.py
            ...
        model/              # Model related things
            entity/         # OTXModel
                base.py
                detection.py
                ...
            module/         # OTXLitModule
                base.py
                detection.py
                ...
        types/              # Enum definitions (e.g. OTXTaskType)
        utils/              # Utility functions
    recipe/                 # Recipe YAML config for each model we support
        detection/          # (e.g., rtmdet_tiny)
        ...
    tools/                  # Python runnable scripts for some TBD use cases
```
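
Because `src` is added to `PYTHONPATH` (see the setup guide), this tree maps directly onto `otx` import paths. A minimal sketch, assuming the classes live in the modules named in the comments above:

```python
# Import paths implied by the tree above (a sketch; exact class locations are assumptions)
from otx.core.config import register_configs      # core/config/
from otx.core.data.module import OTXDataModule    # core/data/module.py
from otx.core.engine.train import train           # core/engine/train.py
from otx.core.model.entity.base import OTXModel   # core/model/entity/base.py (assumed)
```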
101 changes: 101 additions & 0 deletions for_developers/requirements-lock.txt
@@ -0,0 +1,101 @@
apturl==0.5.2
attrs==23.1.0
bandit==1.7.4
black==23.9.1
blinker==1.4
Brlapi==0.8.3
cachetools==5.3.0
certifi==2020.6.20
cfgv==3.4.0
chardet==5.1.0
click==8.0.4
colorama==0.4.6
command-not-found==0.3
coverage==7.3.2
cryptography==3.4.8
cupshelpers==1.0
dbus-python==1.2.18
defer==1.0.6
distlib==0.3.6
distro==1.7.0
distro-info===1.1build1
exceptiongroup==1.1.1
filelock==3.10.7
gitdb==4.0.11
GitPython==3.1.40
httplib2==0.20.2
identify==2.5.31
idna==3.3
importlib-metadata==4.6.4
iniconfig==2.0.0
jeepney==0.7.1
keyring==23.5.0
language-selector==0.1
launchpadlib==1.10.16
lazr.restfulclient==0.14.4
lazr.uri==1.0.6
louis==3.20.0
macaroonbakery==1.3.1
maturin==1.1.0
more-itertools==8.10.0
mypy==1.6.0
mypy-extensions==1.0.0
netifaces==0.11.0
nodeenv==1.8.0
oauthlib==3.2.0
olefile==0.46
packaging==23.0
pathspec==0.11.2
pbr==5.11.1
pexpect==4.8.0
Pillow==9.0.1
platformdirs==2.5.1
pluggy==1.0.0
pre-commit==2.15.0
protobuf==3.12.4
ptyprocess==0.7.0
py==1.10.0
pycairo==1.20.1
pycups==2.0.1
PyGObject==3.42.1
PyJWT==2.3.0
pymacaroons==0.13.0
PyNaCl==1.5.0
pyparsing==2.4.7
pyproject_api==1.5.1
pyRFC3339==1.1
pytest==6.2.5
pytest-cov==2.11.1
pytest-html==3.2.0
pytest-metadata==2.0.4
python-apt==2.3.0+ubuntu2.1
python-dateutil==2.8.1
python-debian===0.1.43ubuntu1
pytz==2022.1
pyxdg==0.27
PyYAML==5.4.1
reportlab==3.6.8
requests==2.25.1
ruff==0.0.292
screen-resolution-extra==0.0.0
SecretStorage==3.3.1
six==1.16.0
smmap==5.0.1
ssh-import-id==5.11
stevedore==5.1.0
systemd-python==234
testfixtures==7.0.0
toml==0.10.2
tomli==2.0.1
tox==4.4.8
typing_extensions==4.8.0
ubuntu-advantage-tools==27.9
ubuntu-drivers-common==0.0.0
ufw==0.36.1
unattended-upgrades==0.1
urllib3==1.26.5
virtualenv==20.21.0
wadllib==1.3.6
xdg==5
xkit==0.0.0
zipp==1.0.0
32 changes: 32 additions & 0 deletions for_developers/setup_guide.md
@@ -0,0 +1,32 @@
# How to set up the dev environment

## Installation

```console
# Create venv from conda
conda create -n otx-v2 python=3.11
conda activate otx-v2

# Install PyTorch and TorchVision
conda install pytorch torchvision torchaudio pytorch-cuda=11.8 -c pytorch -c nvidia

# Install core dependencies
pip install lightning datumaro omegaconf hydra-core

# Install mmcv (mmdet)
pip install -U openmim
mim install mmengine "mmcv>=2.0.0" mmdet

# Install this package (sorry, the installation step is not configured yet; until then, please set the PYTHONPATH env var as follows)
export PYTHONPATH=${PYTHONPATH}:${PWD}/src
```

Please see [requirements-lock.txt](requirements-lock.txt); it is the output of `pip freeze` after the installation steps above.
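
A quick sanity check for the environment (a sketch, assuming the steps above completed and `${PWD}/src` is on `PYTHONPATH`):

```python
# Each import should succeed if installation and PYTHONPATH are set up correctly
import datumaro
import hydra
import lightning
import omegaconf

from otx.core.config import register_configs  # requires ${PWD}/src on PYTHONPATH

print("lightning", lightning.__version__)
print("hydra", hydra.__version__)
```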

## Launch training with demo recipe

```console
# Please check whether your PYTHONPATH is correctly setup first
python src/otx/cli/train.py +recipe=detection/atss_r50_fpn base.data_dir=tests/assets/car_tree_bug model.otx_model.config.bbox_head.num_classes=3 trainer.max_epochs=50 trainer.check_val_every_n_epoch=10 trainer=gpu
```
23 changes: 19 additions & 4 deletions pyproject.toml
@@ -122,8 +122,6 @@ python_version = 3.8
ignore_missing_imports = true
show_error_codes = true

# TODO: Need to be edited
follow_imports = "skip"


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
@@ -191,16 +189,27 @@ ignore = [
# pydocstyle
# On top of the Google convention, disable `D417`, which requires
# documentation for every function parameter.
"D417", # Missing argument descriptions in the docstring
"D417", # Missing argument descriptions in the docstring

"D107", # Missing docstring in `__init__`
"D105", # Missing docstring in magic method

# flake8-annotations
"ANN101", # Missing-type-self
"ANN002", # Missing type annotation for *args
"ANN003", # Missing type annotation for **kwargs

"ANN102", # Missing type annotation for `cls` in classmethod
"ANN204", # Missing return type annotation for special method `__init__`

"ARG002", # Unused method argument

# flake8-type-checking
"TCH001", # typing-only-first-party-import, Sometimes this causes an incorrect error.
# flake8-fixme
"FIX002" # line-contains-todo
"FIX002", # line-contains-todo

"UP007" # Use `X | Y` for type annotations
]

# Allow autofix for all enabled rules (when `--fix` is provided).
@@ -232,6 +241,9 @@ exclude = [
"dist",
"node_modules",
"venv",
"tests/assets",

"src/otx/core/engine/utils/*" # It will be clean up later
]

# Same as Black.
@@ -265,6 +277,9 @@ max-returns = 10
"ANN001", # Skip annotation type hint in test codes
"D", # Test skips missing docstring argument with magic (fixture) methods.
]
"src/otx/**/*.py" = [
"ERA001",
]

[tool.ruff.pydocstyle]
convention = "google"
4 changes: 4 additions & 0 deletions src/otx/algo/__init__.py
@@ -0,0 +1,4 @@
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""Module for OTX custom algorithms, e.g., model, losses, hook, etc..."""
4 changes: 4 additions & 0 deletions src/otx/algo/model/__init__.py
@@ -0,0 +1,4 @@
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""Module for OTX custom model."""
4 changes: 4 additions & 0 deletions src/otx/algo/model/hierarchical_cls_head.py
@@ -0,0 +1,4 @@
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""Hierarchical classification head PyTorch module."""
4 changes: 4 additions & 0 deletions src/otx/cli/__init__.py
@@ -0,0 +1,4 @@
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""CLI entrypoints."""
41 changes: 41 additions & 0 deletions src/otx/cli/train.py
@@ -0,0 +1,41 @@
# Copyright (C) 2023 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
"""CLI entrypoint for training."""
# ruff: noqa

import hydra
from omegaconf import DictConfig

from otx.core.config import register_configs

register_configs()


@hydra.main(version_base="1.3", config_path="../config", config_name="train.yaml")
def main(cfg: DictConfig) -> None:
    """Main entry point for training.

    :param cfg: DictConfig configuration composed by Hydra.
    """
    from otx.core.engine.train import train

    # apply extra utilities
    # (e.g. ask for tags if none are provided in cfg, print cfg tree, etc.)
    # utils.extras(cfg)

    # train the model
    metric_dict, _ = train(cfg)

    # # safely retrieve metric value for hydra-based hyperparameter optimization
    # metric_value = utils.get_metric_value(
    #     metric_dict=metric_dict, metric_name=cfg.get("optimized_metric")
    # )

    # # return optimized metric
    # return metric_value


if __name__ == "__main__":
    main()
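
`otx.core.engine.train.train` itself is not among the files shown here; from the call site `metric_dict, _ = train(cfg)`, its contract appears to be a two-tuple whose first element is a dict of metrics. A hypothetical stub, for illustration only:

```python
# Hypothetical signature inferred from the call site above; not the actual implementation
from typing import Any

from omegaconf import DictConfig


def train(cfg: DictConfig) -> tuple[dict[str, Any], Any]:
    """Train a model from the composed Hydra config and return (metric_dict, extras)."""
    raise NotImplementedError
```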
4 changes: 4 additions & 0 deletions src/otx/config/__init__.py
@@ -0,0 +1,4 @@
"""OTX default YAML configuration file collection.
this file is needed here to include configs when building project as a package
"""
22 changes: 22 additions & 0 deletions src/otx/config/base/default.yaml
@@ -0,0 +1,22 @@
# OTX task enum
task: ???

# # simply provide checkpoint path to resume training
# ckpt_path: null

# # seed for random number generators in pytorch, numpy and python.random
# seed: null

# path to working directory
work_dir: ${hydra:runtime.cwd}

# path to data directory
data_dir: ${base.work_dir}/data/

# path to logging directory
log_dir: ${base.work_dir}/logs/

# path to output directory, created dynamically by hydra
# path generation pattern is specified in `configs/hydra/default.yaml`
# use it to store all files generated during the run, like ckpts and metrics
output_dir: ${hydra:runtime.output_dir}
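
The `${...}` references above are OmegaConf interpolations, resolved when the config is accessed; `${hydra:...}` additionally requires a running Hydra application. A minimal standalone sketch of how `${base.work_dir}` resolves, with a literal path standing in for `${hydra:runtime.cwd}`:

```python
from omegaconf import OmegaConf

# Standalone sketch: "/home/user/project" stands in for ${hydra:runtime.cwd},
# which only resolves inside a Hydra run.
cfg = OmegaConf.create(
    {
        "base": {
            "work_dir": "/home/user/project",
            "data_dir": "${base.work_dir}/data/",
            "log_dir": "${base.work_dir}/logs/",
        },
    },
)
print(cfg.base.data_dir)  # /home/user/project/data/
print(cfg.base.log_dir)   # /home/user/project/logs/
```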
4 changes: 4 additions & 0 deletions src/otx/config/base/detection.yaml
@@ -0,0 +1,4 @@
defaults:
- default

task: DETECTION
22 changes: 22 additions & 0 deletions src/otx/config/callbacks/default.yaml
@@ -0,0 +1,22 @@
defaults:
- model_checkpoint
- early_stopping
- model_summary
- rich_progress_bar
- _self_

model_checkpoint:
  dirpath: ${paths.output_dir}/checkpoints
  filename: "epoch_{epoch:03d}"
  monitor: "val/acc"
  mode: "max"
  save_last: True
  auto_insert_metric_name: False

early_stopping:
  monitor: "val/acc"
  patience: 100
  mode: "max"

model_summary:
  max_depth: -1
15 changes: 15 additions & 0 deletions src/otx/config/callbacks/early_stopping.yaml
@@ -0,0 +1,15 @@
# https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.EarlyStopping.html

early_stopping:
  _target_: lightning.pytorch.callbacks.EarlyStopping
  monitor: ??? # quantity to be monitored, must be specified !!!
  min_delta: 0. # minimum change in the monitored quantity to qualify as an improvement
  patience: 3 # number of checks with no improvement after which training will be stopped
  verbose: False # verbosity mode
  mode: "min" # "max" means higher metric value is better; can also be "min"
  strict: True # whether to crash the training if monitor is not found in the validation metrics
  check_finite: True # when set True, stops training when the monitor becomes NaN or infinite
  stopping_threshold: null # stop training immediately once the monitored quantity reaches this threshold
  divergence_threshold: null # stop training as soon as the monitored quantity becomes worse than this threshold
  check_on_train_epoch_end: null # whether to run early stopping at the end of the training epoch
  # log_rank_zero_only: False # this keyword argument isn't available in stable version
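
Entries with a `_target_` key like the one above are typically built with Hydra's `instantiate` utility. A minimal sketch, using the monitor/mode/patience overrides shown in `callbacks/default.yaml`:

```python
from hydra.utils import instantiate
from omegaconf import OmegaConf

# Sketch: build an EarlyStopping callback from a _target_-style config entry
cfg = OmegaConf.create(
    {
        "_target_": "lightning.pytorch.callbacks.EarlyStopping",
        "monitor": "val/acc",
        "mode": "max",
        "patience": 100,
    },
)
early_stopping = instantiate(cfg)
print(type(early_stopping).__name__)  # EarlyStopping
```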
17 changes: 17 additions & 0 deletions src/otx/config/callbacks/model_checkpoint.yaml
@@ -0,0 +1,17 @@
# https://lightning.ai/docs/pytorch/stable/api/lightning.pytorch.callbacks.ModelCheckpoint.html

model_checkpoint:
  _target_: lightning.pytorch.callbacks.ModelCheckpoint
  dirpath: null # directory to save the model file
  filename: null # checkpoint filename
  monitor: null # name of the logged metric which determines when model is improving
  verbose: False # verbosity mode
  save_last: null # additionally always save an exact copy of the last checkpoint to a file last.ckpt
  save_top_k: 1 # save k best models (determined by above metric)
  mode: "min" # "max" means higher metric value is better; can also be "min"
  auto_insert_metric_name: True # when True, the checkpoints filenames will contain the metric name
  save_weights_only: False # if True, then only the model’s weights will be saved
  every_n_train_steps: null # number of training steps between checkpoints
  train_time_interval: null # checkpoints are monitored at the specified time interval
  every_n_epochs: null # number of epochs between checkpoints
  save_on_train_epoch_end: null # whether to run checkpointing at the end of the training epoch or the end of validation