From a663cc9a76a4344fc92360c432d2b50bbd30e5d5 Mon Sep 17 00:00:00 2001
From: Harim Kang
Date: Fri, 11 Oct 2024 17:24:22 +0900
Subject: [PATCH] Fix out_features in HierarchicalCBAMClsHead (#4016)

Fix out_features
---
 CHANGELOG.md                                          | 4 ++++
 src/otx/algo/classification/heads/hlabel_cls_head.py  | 2 +-
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f25e48e528f..7dbf01612eb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -72,6 +72,10 @@ All notable changes to this project will be documented in this file.
   ()
 - Fix config converter for tiling
   ()
+- Fix num_trials calculation on dataset length less than num_class
+  ()
+- Fix out_features in HierarchicalCBAMClsHead
+  ()
 
 ## \[v2.1.0\]
 
diff --git a/src/otx/algo/classification/heads/hlabel_cls_head.py b/src/otx/algo/classification/heads/hlabel_cls_head.py
index f1041d06079..71268bb9ea0 100644
--- a/src/otx/algo/classification/heads/hlabel_cls_head.py
+++ b/src/otx/algo/classification/heads/hlabel_cls_head.py
@@ -355,7 +355,7 @@ def __init__(
         self.fc_superclass = nn.Linear(in_channels * self.step_size[0] * self.step_size[1], num_multiclass_heads)
         self.attention_fc = nn.Linear(num_multiclass_heads, in_channels * self.step_size[0] * self.step_size[1])
         self.cbam = CBAM(in_channels)
-        self.fc_subclass = nn.Linear(in_channels * self.step_size[0] * self.step_size[1], num_single_label_classes)
+        self.fc_subclass = nn.Linear(in_channels * self.step_size[0] * self.step_size[1], num_classes)
         self._init_layers()
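
For context on the fix: fc_subclass previously produced num_single_label_classes logits, and the patch resizes it to num_classes. The sketch below is a minimal, hypothetical reconstruction of the corrected constructor, not the actual OTX head; the class and stub names, the default in_channels=512, and the 7x7 step_size are assumptions introduced for illustration, while the Linear layer shapes mirror the patched lines.

# Minimal sketch (assumed defaults: in_channels=512, 7x7 pooled feature map).
# CBAMStub is an identity stand-in for the real CBAM attention module.
import torch
from torch import nn


class CBAMStub(nn.Module):
    """Identity placeholder for the Convolutional Block Attention Module."""

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return x


class HierarchicalCBAMClsHeadSketch(nn.Module):
    """Illustrates the corrected layer shapes from the patch."""

    def __init__(
        self,
        num_multiclass_heads: int,
        num_classes: int,
        in_channels: int = 512,
        step_size: tuple = (7, 7),
    ) -> None:
        super().__init__()
        self.step_size = step_size
        flat_dim = in_channels * step_size[0] * step_size[1]
        self.fc_superclass = nn.Linear(flat_dim, num_multiclass_heads)
        self.attention_fc = nn.Linear(num_multiclass_heads, flat_dim)
        self.cbam = CBAMStub()
        # The patched line: out_features is num_classes, where the previous code
        # used num_single_label_classes.
        self.fc_subclass = nn.Linear(flat_dim, num_classes)

With num_classes=10, for example, fc_subclass now emits 10 logits; previously its width was tied to num_single_label_classes, which can differ from the total class count in hierarchical-label setups.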