diff --git a/docs/source/guide/explanation/algorithms/action/action_classification.rst b/docs/source/guide/explanation/algorithms/action/action_classification.rst
index c570da5b842..be1b81b2a72 100644
--- a/docs/source/guide/explanation/algorithms/action/action_classification.rst
+++ b/docs/source/guide/explanation/algorithms/action/action_classification.rst
@@ -30,7 +30,7 @@ Models
 Currently OpenVINO™ Training Extensions supports `X3D `_ and `MoViNet `_ for action classification.
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------+---------------------+-------------------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+=========+=====================+=========================+
 | `Custom_Action_Classification_X3D `_ | X3D | 2.49 | 3.79 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------+---------------------+-------------------------+
diff --git a/docs/source/guide/explanation/algorithms/action/action_detection.rst b/docs/source/guide/explanation/algorithms/action/action_detection.rst
index b36b2bb0bec..1eae4fae0d3 100644
--- a/docs/source/guide/explanation/algorithms/action/action_detection.rst
+++ b/docs/source/guide/explanation/algorithms/action/action_detection.rst
@@ -27,7 +27,7 @@ Models
 We support the following ready-to-use model recipes for transfer learning:
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------+---------------------+-------------------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+===============+=====================+=========================+
 | `Custom_Action_Detection_X3D_FAST_RCNN `_ | x3d_fast_rcnn | 13.04 | 8.32 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------+---------------------+-------------------------+
diff --git a/docs/source/guide/explanation/algorithms/classification/multi_class_classification.rst b/docs/source/guide/explanation/algorithms/classification/multi_class_classification.rst
index 3600828570b..0966c047a15 100644
--- a/docs/source/guide/explanation/algorithms/classification/multi_class_classification.rst
+++ b/docs/source/guide/explanation/algorithms/classification/multi_class_classification.rst
@@ -56,7 +56,7 @@ Models
 We support the following ready-to-use model recipes:
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+-----------------------+---------------------+-----------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+=======================+=====================+=================+
 | `Custom_Image_Classification_MobileNet-V3-large-1x `_ | MobileNet-V3-large-1x | 0.44 | 4.29 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+-----------------------+---------------------+-----------------+
diff --git a/docs/source/guide/explanation/algorithms/object_detection/object_detection.rst b/docs/source/guide/explanation/algorithms/object_detection/object_detection.rst
index 6f13b3dd25d..3dd3fbc0349 100644
--- a/docs/source/guide/explanation/algorithms/object_detection/object_detection.rst
+++ b/docs/source/guide/explanation/algorithms/object_detection/object_detection.rst
@@ -57,7 +57,7 @@ Models
 We support the following ready-to-use model recipes:
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------------+---------------------+-----------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+=====================+=====================+=================+
 | `Custom_Object_Detection_YOLOX `_ | YOLOX-TINY | 6.5 | 20.4 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+---------------------+---------------------+-----------------+
diff --git a/docs/source/guide/explanation/algorithms/segmentation/semantic_segmentation.rst b/docs/source/guide/explanation/algorithms/segmentation/semantic_segmentation.rst
index a7dc5e43206..e631301702d 100644
--- a/docs/source/guide/explanation/algorithms/segmentation/semantic_segmentation.rst
+++ b/docs/source/guide/explanation/algorithms/segmentation/semantic_segmentation.rst
@@ -56,7 +56,7 @@ Models
 We support the following ready-to-use model recipes:
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+------------------------+---------------------+-----------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+========================+=====================+=================+
 | `Custom_Semantic_Segmentation_Lite-HRNet-s-mod2_OCR `_ | Lite-HRNet-s-mod2 | 1.44 | 3.2 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+------------------------+---------------------+-----------------+
diff --git a/docs/source/guide/explanation/algorithms/visual_prompting/fine_tuning.rst b/docs/source/guide/explanation/algorithms/visual_prompting/fine_tuning.rst
index 7bc211dccdf..32550c0e479 100644
--- a/docs/source/guide/explanation/algorithms/visual_prompting/fine_tuning.rst
+++ b/docs/source/guide/explanation/algorithms/visual_prompting/fine_tuning.rst
@@ -58,7 +58,7 @@ Models
 We support the following model recipes in experimental phase:
 
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+--------------+---------------------+-----------------+
-| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
+| Recipe ID | Name | Complexity (GFLOPs) | Model size (MB) |
 +==================================================================================================================================================================================+==============+=====================+=================+
 | `Visual_Prompting_SAM_Tiny_ViT `_ | SAM_Tiny_ViT | 38.55 | 47 |
 +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+--------------+---------------------+-----------------+
diff --git a/src/otx/engine/engine.py b/src/otx/engine/engine.py
index e876581e768..7292617b665 100644
--- a/src/otx/engine/engine.py
+++ b/src/otx/engine/engine.py
@@ -65,23 +65,26 @@ class Engine:
     Example:
         The following examples show how to use the Engine class.
 
-        Auto-Configuration with data_root
-        >>> engine = Engine(
-        ...     data_root=,
-        ... )
-
-        Create Engine with Custom OTXModel
-        >>> engine = Engine(
-        ...     data_root=,
-        ...     model=OTXModel(...),
-        ...     checkpoint=,
-        ... )
-
-        Create Engine with Custom OTXDataModule
-        >>> engine = Engine(
-        ...     model = OTXModel(...),
-        ...     datamodule = OTXDataModule(...),
-        ... )
+        Auto-Configuration with data_root::
+
+            engine = Engine(
+                data_root=,
+            )
+
+        Create Engine with Custom OTXModel::
+
+            engine = Engine(
+                data_root=,
+                model=OTXModel(...),
+                checkpoint=,
+            )
+
+        Create Engine with Custom OTXDataModule::
+
+            engine = Engine(
+                model = OTXModel(...),
+                datamodule = OTXDataModule(...),
+            )
     """
 
     def __init__(
@@ -527,6 +530,7 @@ def optimize(
             ...     datamodule=OTXDataModule(),
             ...     checkpoint=,
             ... )
+
         CLI Usage:
             To optimize a model, run
             ```python
@@ -743,17 +747,17 @@ def from_model_name(
            ...     data_root=,
            ... )
 
-        If you want to override configuration from default config
-        >>> overriding = {
-        ...     "data.config.train_subset.batch_size": 2,
-        ...     "data.config.test_subset.subset_name": "TESTING",
-        ... }
-        >>> engine = Engine(
-        ...     model_name="atss_mobilenetv2",
-        ...     task="DETECTION",
-        ...     data_root=,
-        ...     **overriding,
-        ... )
+        If you want to override configuration from default config:
+            >>> overriding = {
+            ...     "data.config.train_subset.batch_size": 2,
+            ...     "data.config.test_subset.subset_name": "TESTING",
+            ... }
+            >>> engine = Engine(
+            ...     model_name="atss_mobilenetv2",
+            ...     task="DETECTION",
+            ...     data_root=,
+            ...     **overriding,
+            ... )
         """
         default_config = DEFAULT_CONFIG_PER_TASK.get(task)
         model_path = str(default_config).split("/")
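For reviewers who want to try the reformatted docstring examples end to end, here is a minimal sketch assembled from the snippets above. It is not part of this diff: the `otx.engine` import path is assumed from the module location, the dataset paths are hypothetical stand-ins for the stripped placeholders, and `train()` is assumed from the wider OTX Engine API.

```python
# Minimal usage sketch for the Engine API documented in the docstrings above.
# Assumptions (not shown in this diff): the `otx.engine.Engine` import path,
# the placeholder dataset paths, and the `train()` entry point.
from otx.engine import Engine

# Auto-configuration: task, model, and datamodule are derived from data_root.
engine = Engine(data_root="path/to/dataset")  # hypothetical path

# Override selected defaults of a named recipe, mirroring the
# from_model_name() docstring example.
overriding = {
    "data.config.train_subset.batch_size": 2,
    "data.config.test_subset.subset_name": "TESTING",
}
engine = Engine(
    model_name="atss_mobilenetv2",
    task="DETECTION",
    data_root="path/to/dataset",  # hypothetical path
    **overriding,
)

engine.train()  # assumed training entry point; not part of this diff
```

As a side note on the docstring change itself, moving the class-level examples from doctest-style `>>>` lines to `::` literal blocks presumably keeps doctest runners from trying to execute snippets whose placeholder arguments are not valid Python.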