diff --git a/src/otx/core/data/dataset/detection.py b/src/otx/core/data/dataset/detection.py
index 8094638b457..feba0d454b9 100644
--- a/src/otx/core/data/dataset/detection.py
+++ b/src/otx/core/data/dataset/detection.py
@@ -49,8 +49,9 @@ def _get_item_impl(self, index: int) -> DetDataEntity | None:
                 bboxes,
                 format=tv_tensors.BoundingBoxFormat.XYXY,
                 canvas_size=img_shape,
+                dtype=torch.float32,
             ),
-            labels=torch.as_tensor([ann.label for ann in bbox_anns]),
+            labels=torch.as_tensor([ann.label for ann in bbox_anns], dtype=torch.long),
         )
 
         return self._apply_transforms(entity)
diff --git a/src/otx/core/data/dataset/instance_segmentation.py b/src/otx/core/data/dataset/instance_segmentation.py
index 0a3abaeb877..d154ebd4ab2 100644
--- a/src/otx/core/data/dataset/instance_segmentation.py
+++ b/src/otx/core/data/dataset/instance_segmentation.py
@@ -75,6 +75,7 @@ def _get_item_impl(self, index: int) -> InstanceSegDataEntity | None:
                 bboxes,
                 format=tv_tensors.BoundingBoxFormat.XYXY,
                 canvas_size=img_shape,
+                dtype=torch.float32,
             ),
             masks=tv_tensors.Mask(masks, dtype=torch.uint8),
             labels=torch.as_tensor(labels),
diff --git a/src/otx/core/metrics/fmeasure.py b/src/otx/core/metrics/fmeasure.py
index 6cc44850a2f..d3c71285f94 100644
--- a/src/otx/core/metrics/fmeasure.py
+++ b/src/otx/core/metrics/fmeasure.py
@@ -636,6 +636,8 @@ class FMeasure(Metric):
     IoU > threshold are reduced to one. This threshold can be determined automatically by setting
     `vary_nms_threshold` to True.
 
+    # TODO(someone): need to update for distributed training. Refer to https://lightning.ai/docs/torchmetrics/stable/pages/implement.html
+
     Args:
         label_info (int): Dataclass including label information.
         vary_nms_threshold (bool): if True the maximal F-measure is determined by optimizing for different NMS threshold
diff --git a/src/otx/core/model/detection.py b/src/otx/core/model/detection.py
index 437aa6b6e96..f2da68b2b7f 100644
--- a/src/otx/core/model/detection.py
+++ b/src/otx/core/model/detection.py
@@ -297,7 +297,7 @@ def _convert_pred_entity_to_compute_metric(
             "preds": [
                 {
                     "boxes": bboxes.data,
-                    "scores": scores,
+                    "scores": scores.type(torch.float32),
                     "labels": labels,
                 }
                 for bboxes, scores, labels in zip(
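
Two short illustrations of the reasoning behind these hunks. First, the dataset changes pin dtypes because `tv_tensors.BoundingBoxes` infers its dtype from the input data, so integer pixel coordinates silently become `int64` boxes. A quick demonstration, assuming the torchvision v2 `tv_tensors` API used in the patch (the coordinates and canvas size are made up):

```python
import torch
from torchvision import tv_tensors

# Without an explicit dtype, integer inputs yield int64 boxes, which can
# trip up transforms and metrics that expect floating-point coordinates.
boxes_int = tv_tensors.BoundingBoxes(
    [[0, 0, 10, 10]],
    format=tv_tensors.BoundingBoxFormat.XYXY,
    canvas_size=(32, 32),
)
print(boxes_int.dtype)  # torch.int64

# Forcing dtype=torch.float32, as the patch does, keeps boxes floating-point.
boxes_f32 = tv_tensors.BoundingBoxes(
    [[0, 0, 10, 10]],
    format=tv_tensors.BoundingBoxFormat.XYXY,
    canvas_size=(32, 32),
    dtype=torch.float32,
)
print(boxes_f32.dtype)  # torch.float32
```

Second, the TODO in `fmeasure.py` points at the torchmetrics implementation guide. Below is a minimal sketch of the distributed-safe pattern that guide recommends; it is *not* the OTX `FMeasure` (which accumulates raw predictions and can search over NMS thresholds). It assumes a fixed IoU threshold and uses a simple greedy matcher invented for this example; the point is only that states registered via `add_state(..., dist_reduce_fx="sum")` are synced across ranks before `compute()` runs:

```python
import torch
from torchmetrics import Metric
from torchvision.ops import box_iou


class DistributedFMeasureSketch(Metric):
    """Hypothetical DDP-safe F-measure following the torchmetrics state pattern."""

    def __init__(self, iou_threshold: float = 0.5) -> None:
        super().__init__()
        self.iou_threshold = iou_threshold
        # Scalar states with dist_reduce_fx="sum" are summed across processes
        # before compute() is called, so the metric is distributed-safe.
        self.add_state("tp", default=torch.tensor(0.0), dist_reduce_fx="sum")
        self.add_state("fp", default=torch.tensor(0.0), dist_reduce_fx="sum")
        self.add_state("fn", default=torch.tensor(0.0), dist_reduce_fx="sum")

    def update(self, pred_boxes: torch.Tensor, gt_boxes: torch.Tensor) -> None:
        """Accumulate TP/FP/FN for one image; boxes are (N, 4) XYXY tensors."""
        if pred_boxes.numel() == 0:
            self.fn += gt_boxes.shape[0]
            return
        if gt_boxes.numel() == 0:
            self.fp += pred_boxes.shape[0]
            return
        ious = box_iou(pred_boxes, gt_boxes)  # (num_preds, num_gts)
        tp = 0
        for p in range(ious.shape[0]):
            best_iou, best_gt = ious[p].max(dim=0)
            if best_iou >= self.iou_threshold:
                tp += 1
                ious[:, best_gt] = -1.0  # each GT box may be matched only once
        self.tp += tp
        self.fp += pred_boxes.shape[0] - tp
        self.fn += gt_boxes.shape[0] - tp

    def compute(self) -> torch.Tensor:
        precision = self.tp / (self.tp + self.fp).clamp(min=1e-8)
        recall = self.tp / (self.tp + self.fn).clamp(min=1e-8)
        return 2 * precision * recall / (precision + recall).clamp(min=1e-8)
```

On a single process this behaves like any torchmetrics metric; under DDP, torchmetrics gathers and sums `tp`/`fp`/`fn` across ranks before `compute()`, which is the property the TODO asks for. Porting the real `FMeasure` would additionally require expressing its per-class, per-NMS-threshold bookkeeping as reducible states.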