From c01452253b03938fa21ce98a7e392a0f3bd8b65d Mon Sep 17 00:00:00 2001
From: lkaesberg
Date: Wed, 16 Aug 2023 23:35:35 +0200
Subject: [PATCH] :bug: moved attention layer to top level to include it in
 the submission file

---
 layers/AttentionLayer.py => AttentionLayer.py | 0
 classifier.py                                 | 2 +-
 layers/__init__.py                            | 0
 multitask_classifier.py                       | 2 +-
 4 files changed, 2 insertions(+), 2 deletions(-)
 rename layers/AttentionLayer.py => AttentionLayer.py (100%)
 delete mode 100644 layers/__init__.py

diff --git a/layers/AttentionLayer.py b/AttentionLayer.py
similarity index 100%
rename from layers/AttentionLayer.py
rename to AttentionLayer.py
diff --git a/classifier.py b/classifier.py
index f75fcc6..69cfe87 100644
--- a/classifier.py
+++ b/classifier.py
@@ -10,7 +10,7 @@
 from sklearn.metrics import classification_report, f1_score, recall_score, accuracy_score
 from torch.utils.tensorboard import SummaryWriter
 
-from layers.AttentionLayer import AttentionLayer
+from AttentionLayer import AttentionLayer
 # change it with respect to the original model
 from tokenizer import BertTokenizer
 from bert import BertModel
diff --git a/layers/__init__.py b/layers/__init__.py
deleted file mode 100644
index e69de29..0000000
diff --git a/multitask_classifier.py b/multitask_classifier.py
index d3f2808..107d6c1 100644
--- a/multitask_classifier.py
+++ b/multitask_classifier.py
@@ -10,7 +10,7 @@
 from torch.utils.tensorboard import SummaryWriter
 
 from bert import BertModel
-from layers.AttentionLayer import AttentionLayer
+from AttentionLayer import AttentionLayer
 from optimizer import AdamW
 from tqdm import tqdm